mirror of https://github.com/actions/cache.git
synced 2025-10-31 23:36:22 +07:00

Compare commits

3 Commits (joshmgross ... joshmgross)
| Author | SHA1 | Date |
|---|---|---|
| | b767a42249 | |
| | 1c5b02ee04 | |
| | 7352daed78 | |

README.md (21 changes)
@@ -54,26 +54,9 @@ jobs:
       run: /primes.sh -d prime-numbers
 ```
 
-## Implementation Examples
+## Ecosystem Examples
 
-Every programming language and framework has it's own way of caching.
-
-See [Examples](examples.md) for a list of `actions/cache` implementations for use with:
-
-- [C# - Nuget](./examples.md#c---nuget)
-- [Elixir - Mix](./examples.md#elixir---mix)
-- [Go - Modules](./examples.md#go---modules)
-- [Java - Gradle](./examples.md#java---gradle)
-- [Java - Maven](./examples.md#java---maven)
-- [Node - npm](./examples.md#node---npm)
-- [Node - Yarn](./examples.md#node---yarn)
-- [PHP - Composer](./examples.md#php---composer)
-- [Python - pip](./examples.md#python---pip)
-- [Ruby - Gem](./examples.md#ruby---gem)
-- [Rust - Cargo](./examples.md#rust---cargo)
-- [Swift, Objective-C - Carthage](./examples.md#swift-objective-c---carthage)
-- [Swift, Objective-C - CocoaPods](./examples.md#swift-objective-c---cocoapods)
-
+See [Examples](examples.md)
 
 ## Cache Limits
 

__tests__/restore.test.ts

@@ -1,18 +1,16 @@
 import * as core from "@actions/core";
-import * as exec from "@actions/exec";
-import * as io from "@actions/io";
 import * as path from "path";
 import * as cacheHttpClient from "../src/cacheHttpClient";
 import { Events, Inputs } from "../src/constants";
 import { ArtifactCacheEntry } from "../src/contracts";
 import run from "../src/restore";
+import * as tar from "../src/tar";
 import * as actionUtils from "../src/utils/actionUtils";
 import * as testUtils from "../src/utils/testUtils";
 
-jest.mock("@actions/exec");
-jest.mock("@actions/io");
-jest.mock("../src/utils/actionUtils");
 jest.mock("../src/cacheHttpClient");
+jest.mock("../src/tar");
+jest.mock("../src/utils/actionUtils");
 
 beforeAll(() => {
     jest.spyOn(actionUtils, "resolvePath").mockImplementation(filePath => {
@@ -35,10 +33,6 @@ beforeAll(() => {
         const actualUtils = jest.requireActual("../src/utils/actionUtils");
         return actualUtils.getSupportedEvents();
     });
-
-    jest.spyOn(io, "which").mockImplementation(tool => {
-        return Promise.resolve(tool);
-    });
 });
 
 beforeEach(() => {
@@ -245,8 +239,7 @@ test("restore with cache found", async () => {
         .spyOn(actionUtils, "getArchiveFileSize")
         .mockReturnValue(fileSize);
 
-    const mkdirMock = jest.spyOn(io, "mkdirP");
-    const execMock = jest.spyOn(exec, "exec");
+    const extractTarMock = jest.spyOn(tar, "extractTar");
     const setCacheHitOutputMock = jest.spyOn(actionUtils, "setCacheHitOutput");
 
     await run();
@@ -255,27 +248,11 @@ test("restore with cache found", async () => {
     expect(getCacheMock).toHaveBeenCalledWith([key]);
     expect(setCacheStateMock).toHaveBeenCalledWith(cacheEntry);
     expect(createTempDirectoryMock).toHaveBeenCalledTimes(1);
-    expect(downloadCacheMock).toHaveBeenCalledWith(
-        cacheEntry.archiveLocation,
-        archivePath
-    );
+    expect(downloadCacheMock).toHaveBeenCalledWith(cacheEntry, archivePath);
     expect(getArchiveFileSizeMock).toHaveBeenCalledWith(archivePath);
-    expect(mkdirMock).toHaveBeenCalledWith(cachePath);
 
-    const IS_WINDOWS = process.platform === "win32";
-    const args = IS_WINDOWS
-        ? [
-              "-xz",
-              "--force-local",
-              "-f",
-              archivePath.replace(/\\/g, "/"),
-              "-C",
-              cachePath.replace(/\\/g, "/")
-          ]
-        : ["-xz", "-f", archivePath, "-C", cachePath];
-
-    expect(execMock).toHaveBeenCalledTimes(1);
-    expect(execMock).toHaveBeenCalledWith(`"tar"`, args);
+    expect(extractTarMock).toHaveBeenCalledTimes(1);
+    expect(extractTarMock).toHaveBeenCalledWith(archivePath, cachePath);
 
     expect(setCacheHitOutputMock).toHaveBeenCalledTimes(1);
     expect(setCacheHitOutputMock).toHaveBeenCalledWith(true);
@@ -326,8 +303,7 @@ test("restore with a pull request event and cache found", async () => {
         .spyOn(actionUtils, "getArchiveFileSize")
         .mockReturnValue(fileSize);
 
-    const mkdirMock = jest.spyOn(io, "mkdirP");
-    const execMock = jest.spyOn(exec, "exec");
+    const extractTarMock = jest.spyOn(tar, "extractTar");
     const setCacheHitOutputMock = jest.spyOn(actionUtils, "setCacheHitOutput");
 
     await run();
@@ -336,28 +312,12 @@ test("restore with a pull request event and cache found", async () => {
     expect(getCacheMock).toHaveBeenCalledWith([key]);
     expect(setCacheStateMock).toHaveBeenCalledWith(cacheEntry);
     expect(createTempDirectoryMock).toHaveBeenCalledTimes(1);
-    expect(downloadCacheMock).toHaveBeenCalledWith(
-        cacheEntry.archiveLocation,
-        archivePath
-    );
+    expect(downloadCacheMock).toHaveBeenCalledWith(cacheEntry, archivePath);
     expect(getArchiveFileSizeMock).toHaveBeenCalledWith(archivePath);
     expect(infoMock).toHaveBeenCalledWith(`Cache Size: ~60 MB (62915000 B)`);
-    expect(mkdirMock).toHaveBeenCalledWith(cachePath);
 
-    const IS_WINDOWS = process.platform === "win32";
-    const args = IS_WINDOWS
-        ? [
-              "-xz",
-              "--force-local",
-              "-f",
-              archivePath.replace(/\\/g, "/"),
-              "-C",
-              cachePath.replace(/\\/g, "/")
-          ]
-        : ["-xz", "-f", archivePath, "-C", cachePath];
-
-    expect(execMock).toHaveBeenCalledTimes(1);
-    expect(execMock).toHaveBeenCalledWith(`"tar"`, args);
+    expect(extractTarMock).toHaveBeenCalledTimes(1);
+    expect(extractTarMock).toHaveBeenCalledWith(archivePath, cachePath);
 
     expect(setCacheHitOutputMock).toHaveBeenCalledTimes(1);
     expect(setCacheHitOutputMock).toHaveBeenCalledWith(true);
@@ -408,8 +368,7 @@ test("restore with cache found for restore key", async () => {
         .spyOn(actionUtils, "getArchiveFileSize")
         .mockReturnValue(fileSize);
 
-    const mkdirMock = jest.spyOn(io, "mkdirP");
-    const execMock = jest.spyOn(exec, "exec");
+    const extractTarMock = jest.spyOn(tar, "extractTar");
     const setCacheHitOutputMock = jest.spyOn(actionUtils, "setCacheHitOutput");
 
     await run();
@@ -418,28 +377,12 @@ test("restore with cache found for restore key", async () => {
     expect(getCacheMock).toHaveBeenCalledWith([key, restoreKey]);
     expect(setCacheStateMock).toHaveBeenCalledWith(cacheEntry);
     expect(createTempDirectoryMock).toHaveBeenCalledTimes(1);
-    expect(downloadCacheMock).toHaveBeenCalledWith(
-        cacheEntry.archiveLocation,
-        archivePath
-    );
+    expect(downloadCacheMock).toHaveBeenCalledWith(cacheEntry, archivePath);
     expect(getArchiveFileSizeMock).toHaveBeenCalledWith(archivePath);
     expect(infoMock).toHaveBeenCalledWith(`Cache Size: ~0 MB (142 B)`);
-    expect(mkdirMock).toHaveBeenCalledWith(cachePath);
 
-    const IS_WINDOWS = process.platform === "win32";
-    const args = IS_WINDOWS
-        ? [
-              "-xz",
-              "--force-local",
-              "-f",
-              archivePath.replace(/\\/g, "/"),
-              "-C",
-              cachePath.replace(/\\/g, "/")
-          ]
-        : ["-xz", "-f", archivePath, "-C", cachePath];
-
-    expect(execMock).toHaveBeenCalledTimes(1);
-    expect(execMock).toHaveBeenCalledWith(`"tar"`, args);
+    expect(extractTarMock).toHaveBeenCalledTimes(1);
+    expect(extractTarMock).toHaveBeenCalledWith(archivePath, cachePath);
 
     expect(setCacheHitOutputMock).toHaveBeenCalledTimes(1);
     expect(setCacheHitOutputMock).toHaveBeenCalledWith(false);
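
The pattern in these test diffs is consistent: the suites stop mocking `@actions/exec` and `@actions/io` directly and instead mock the new `../src/tar` module, asserting against its exports. A minimal self-contained illustration of that jest technique, using a hypothetical `./tar` module rather than this repo's file:

```typescript
// Hypothetical module "./tar" (not this repo's file), for illustration only:
// export async function extractTar(archive: string, dest: string): Promise<void> { ... }
import * as tar from "./tar";

// Auto-mock every export of the module; calls become recordable no-ops.
jest.mock("./tar");

test("delegates extraction to the tar module", async () => {
    const extractTarMock = jest.spyOn(tar, "extractTar");

    // The code under test would make this call internally.
    await tar.extractTar("cache.tgz", "/tmp/cache");

    expect(extractTarMock).toHaveBeenCalledTimes(1);
    expect(extractTarMock).toHaveBeenCalledWith("cache.tgz", "/tmp/cache");
});
```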
|  | |||||||
@@ -1,19 +1,17 @@
 import * as core from "@actions/core";
-import * as exec from "@actions/exec";
-import * as io from "@actions/io";
 import * as path from "path";
 import * as cacheHttpClient from "../src/cacheHttpClient";
 import { Events, Inputs } from "../src/constants";
 import { ArtifactCacheEntry } from "../src/contracts";
 import run from "../src/save";
+import * as tar from "../src/tar";
 import * as actionUtils from "../src/utils/actionUtils";
 import * as testUtils from "../src/utils/testUtils";
 
 jest.mock("@actions/core");
-jest.mock("@actions/exec");
-jest.mock("@actions/io");
-jest.mock("../src/utils/actionUtils");
 jest.mock("../src/cacheHttpClient");
+jest.mock("../src/tar");
+jest.mock("../src/utils/actionUtils");
 
 beforeAll(() => {
     jest.spyOn(core, "getInput").mockImplementation((name, options) => {
@@ -49,10 +47,6 @@ beforeAll(() => {
     jest.spyOn(actionUtils, "createTempDirectory").mockImplementation(() => {
         return Promise.resolve("/foo/bar");
     });
-
-    jest.spyOn(io, "which").mockImplementation(tool => {
-        return Promise.resolve(tool);
-    });
 });
 
 beforeEach(() => {
@@ -128,7 +122,7 @@ test("save with exact match returns early", async () => {
             return primaryKey;
         });
 
-    const execMock = jest.spyOn(exec, "exec");
+    const createTarMock = jest.spyOn(tar, "createTar");
 
     await run();
 
@@ -136,7 +130,7 @@
         `Cache hit occurred on the primary key ${primaryKey}, not saving cache.`
     );
 
-    expect(execMock).toHaveBeenCalledTimes(0);
+    expect(createTarMock).toHaveBeenCalledTimes(0);
 
     expect(failedMock).toHaveBeenCalledTimes(0);
 });
@@ -198,9 +192,9 @@ test("save with large cache outputs warning", async () => {
     const cachePath = path.resolve(inputPath);
     testUtils.setInput(Inputs.Path, inputPath);
 
-    const execMock = jest.spyOn(exec, "exec");
+    const createTarMock = jest.spyOn(tar, "createTar");
 
-    const cacheSize = 4 * 1024 * 1024 * 1024; //~4GB, over the 2GB limit
+    const cacheSize = 1024 * 1024 * 1024; //~1GB, over the 400MB limit
     jest.spyOn(actionUtils, "getArchiveFileSize").mockImplementationOnce(() => {
         return cacheSize;
     });
@@ -209,25 +203,12 @@
 
     const archivePath = path.join("/foo/bar", "cache.tgz");
 
-    const IS_WINDOWS = process.platform === "win32";
-    const args = IS_WINDOWS
-        ? [
-            "-cz",
-            "--force-local",
-            "-f",
-            archivePath.replace(/\\/g, "/"),
-            "-C",
-            cachePath.replace(/\\/g, "/"),
-            "."
-        ]
-        : ["-cz", "-f", archivePath, "-C", cachePath, "."];
+    expect(createTarMock).toHaveBeenCalledTimes(1);
+    expect(createTarMock).toHaveBeenCalledWith(archivePath, cachePath);
 
-    expect(execMock).toHaveBeenCalledTimes(1);
-    expect(execMock).toHaveBeenCalledWith(`"tar"`, args);
-
     expect(logWarningMock).toHaveBeenCalledTimes(1);
     expect(logWarningMock).toHaveBeenCalledWith(
-        "Cache size of ~4 GB (4294967296 B) is over the 2GB limit, not saving cache."
+        "Cache size of ~1024 MB (1073741824 B) is over the 400MB limit, not saving cache."
     );
 
     expect(failedMock).toHaveBeenCalledTimes(0);
@@ -259,12 +240,7 @@ test("save with server error outputs warning", async () => {
     const cachePath = path.resolve(inputPath);
     testUtils.setInput(Inputs.Path, inputPath);
 
-    const cacheId = 4;
-    const reserveCacheMock = jest.spyOn(cacheHttpClient, "reserveCache").mockImplementationOnce(() => {
-        return Promise.resolve(cacheId);
-    });
-
-    const execMock = jest.spyOn(exec, "exec");
+    const createTarMock = jest.spyOn(tar, "createTar");
 
     const saveCacheMock = jest
         .spyOn(cacheHttpClient, "saveCache")
@@ -274,29 +250,13 @@
 
     await run();
 
-    expect(reserveCacheMock).toHaveBeenCalledTimes(1);
-    expect(reserveCacheMock).toHaveBeenCalledWith(primaryKey);
-
     const archivePath = path.join("/foo/bar", "cache.tgz");
 
-    const IS_WINDOWS = process.platform === "win32";
-    const args = IS_WINDOWS
-        ? [
-            "-cz",
-            "--force-local",
-            "-f",
-            archivePath.replace(/\\/g, "/"),
-            "-C",
-            cachePath.replace(/\\/g, "/"),
-            "."
-        ]
-        : ["-cz", "-f", archivePath, "-C", cachePath, "."];
+    expect(createTarMock).toHaveBeenCalledTimes(1);
+    expect(createTarMock).toHaveBeenCalledWith(archivePath, cachePath);
 
-    expect(execMock).toHaveBeenCalledTimes(1);
-    expect(execMock).toHaveBeenCalledWith(`"tar"`, args);
-
     expect(saveCacheMock).toHaveBeenCalledTimes(1);
-    expect(saveCacheMock).toHaveBeenCalledWith(cacheId, archivePath);
+    expect(saveCacheMock).toHaveBeenCalledWith(primaryKey, archivePath);
 
     expect(logWarningMock).toHaveBeenCalledTimes(1);
     expect(logWarningMock).toHaveBeenCalledWith("HTTP Error Occurred");
@@ -329,40 +289,18 @@ test("save with valid inputs uploads a cache", async () => {
     const cachePath = path.resolve(inputPath);
     testUtils.setInput(Inputs.Path, inputPath);
 
-    const cacheId = 4;
-    const reserveCacheMock = jest.spyOn(cacheHttpClient, "reserveCache").mockImplementationOnce(() => {
-        return Promise.resolve(cacheId);
-    });
-
-    const execMock = jest.spyOn(exec, "exec");
-
+    const createTarMock = jest.spyOn(tar, "createTar");
     const saveCacheMock = jest.spyOn(cacheHttpClient, "saveCache");
 
     await run();
 
-    expect(reserveCacheMock).toHaveBeenCalledTimes(1);
-    expect(reserveCacheMock).toHaveBeenCalledWith(primaryKey);
-
     const archivePath = path.join("/foo/bar", "cache.tgz");
 
-    const IS_WINDOWS = process.platform === "win32";
-    const args = IS_WINDOWS
-        ? [
-            "-cz",
-            "--force-local",
-            "-f",
-            archivePath.replace(/\\/g, "/"),
-            "-C",
-            cachePath.replace(/\\/g, "/"),
-            "."
-        ]
-        : ["-cz", "-f", archivePath, "-C", cachePath, "."];
+    expect(createTarMock).toHaveBeenCalledTimes(1);
+    expect(createTarMock).toHaveBeenCalledWith(archivePath, cachePath);
 
-    expect(execMock).toHaveBeenCalledTimes(1);
-    expect(execMock).toHaveBeenCalledWith(`"tar"`, args);
-
     expect(saveCacheMock).toHaveBeenCalledTimes(1);
-    expect(saveCacheMock).toHaveBeenCalledWith(cacheId, archivePath);
+    expect(saveCacheMock).toHaveBeenCalledWith(primaryKey, archivePath);
 
     expect(failedMock).toHaveBeenCalledTimes(0);
 });
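
Read together, the save-side assertions describe the reverted flow: create the archive with `createTar`, enforce a 400 MB (rather than 2 GB) size limit, and upload with `saveCache` keyed by the primary key instead of a reserved cache ID. A sketch of that flow, with the repo's helpers stubbed as ambient declarations (signatures inferred from the diffs above, not verified against the source):

```typescript
import * as path from "path";

// Stand-ins for this repo's helpers; signatures inferred from the test diffs.
declare function createTempDirectory(): Promise<string>;
declare function createTar(archivePath: string, sourceDirectory: string): Promise<void>;
declare function getArchiveFileSize(filePath: string): number;
declare function saveCache(key: string, archivePath: string): Promise<void>;
declare function logWarning(message: string): void;

export async function save(primaryKey: string, cachePath: string): Promise<void> {
    const archivePath = path.join(await createTempDirectory(), "cache.tgz");
    await createTar(archivePath, cachePath);

    // Reverted limit: 400 MB, down from the 2 GB used by the chunked-upload API.
    const fileSizeLimit = 400 * 1024 * 1024;
    const archiveFileSize = getArchiveFileSize(archivePath);
    if (archiveFileSize > fileSizeLimit) {
        logWarning(
            `Cache size of ~${Math.round(archiveFileSize / (1024 * 1024))} MB ` +
                `(${archiveFileSize} B) is over the 400MB limit, not saving cache.`
        );
        return;
    }

    // One upload keyed by the primary key; no reserveCache/commit round-trips.
    await saveCache(primaryKey, archivePath);
}
```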
|  | |||||||
							
								
								
									
										60
									
								
								__tests__/tar.test.ts
									
									
									
									
									
										Normal file
									
								
							
							
						
						
									
										60
									
								
								__tests__/tar.test.ts
									
									
									
									
									
										Normal file
									
								
							| @ -0,0 +1,60 @@ | |||||||
+import * as exec from "@actions/exec";
+import * as io from "@actions/io";
+import * as tar from "../src/tar";
+
+jest.mock("@actions/exec");
+jest.mock("@actions/io");
+
+beforeAll(() => {
+    process.env["windir"] = "C:";
+
+    jest.spyOn(io, "which").mockImplementation(tool => {
+        return Promise.resolve(tool);
+    });
+});
+
+afterAll(() => {
+    delete process.env["windir"];
+});
+
+test("extract tar", async () => {
+    const mkdirMock = jest.spyOn(io, "mkdirP");
+    const execMock = jest.spyOn(exec, "exec");
+
+    const archivePath = "cache.tar";
+    const targetDirectory = "~/.npm/cache";
+    await tar.extractTar(archivePath, targetDirectory);
+
+    expect(mkdirMock).toHaveBeenCalledWith(targetDirectory);
+
+    const IS_WINDOWS = process.platform === "win32";
+    const tarPath = IS_WINDOWS ? "C:\\System32\\tar.exe" : "tar";
+    expect(execMock).toHaveBeenCalledTimes(1);
+    expect(execMock).toHaveBeenCalledWith(`"${tarPath}"`, [
+        "-xz",
+        "-f",
+        archivePath,
+        "-C",
+        targetDirectory
+    ]);
+});
+
+test("create tar", async () => {
+    const execMock = jest.spyOn(exec, "exec");
+
+    const archivePath = "cache.tar";
+    const sourceDirectory = "~/.npm/cache";
+    await tar.createTar(archivePath, sourceDirectory);
+
+    const IS_WINDOWS = process.platform === "win32";
+    const tarPath = IS_WINDOWS ? "C:\\System32\\tar.exe" : "tar";
+    expect(execMock).toHaveBeenCalledTimes(1);
+    expect(execMock).toHaveBeenCalledWith(`"${tarPath}"`, [
+        "-cz",
+        "-f",
+        archivePath,
+        "-C",
+        sourceDirectory,
+        "."
+    ]);
+});
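
The TypeScript source these tests target is not shown in this compare, but it can be read off the compiled module 943 in the dist diffs below. A reconstruction, assuming the standard `@actions/exec` and `@actions/io` APIs (`exec`, `io.which`, `io.mkdirP`):

```typescript
import { exec } from "@actions/exec";
import * as io from "@actions/io";

async function getTarPath(): Promise<string> {
    // Explicitly use BSD tar on Windows, shipped under %windir%\System32.
    const IS_WINDOWS = process.platform === "win32";
    return IS_WINDOWS
        ? `${process.env["windir"]}\\System32\\tar.exe`
        : await io.which("tar", true);
}

export async function extractTar(
    archivePath: string,
    targetDirectory: string
): Promise<void> {
    // Create the directory to extract the tar into.
    await io.mkdirP(targetDirectory);
    const args = ["-xz", "-f", archivePath, "-C", targetDirectory];
    await exec(`"${await getTarPath()}"`, args);
}

export async function createTar(
    archivePath: string,
    sourceDirectory: string
): Promise<void> {
    const args = ["-cz", "-f", archivePath, "-C", sourceDirectory, "."];
    await exec(`"${await getTarPath()}"`, args);
}
```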

dist/restore/index.js (vendored, 227 changes)

@@ -1496,55 +1496,47 @@ const fs = __importStar(__webpack_require__(747));
 const Handlers_1 = __webpack_require__(941);
 const HttpClient_1 = __webpack_require__(874);
 const RestClient_1 = __webpack_require__(105);
-const utils = __importStar(__webpack_require__(443));
-function isSuccessStatusCode(statusCode) {
-    return statusCode >= 200 && statusCode < 300;
-}
-function getCacheApiUrl() {
+function getCacheUrl() {
     // Ideally we just use ACTIONS_CACHE_URL
-    const baseUrl = (process.env["ACTIONS_CACHE_URL"] ||
+    const cacheUrl = (process.env["ACTIONS_CACHE_URL"] ||
         process.env["ACTIONS_RUNTIME_URL"] ||
         "").replace("pipelines", "artifactcache");
-    if (!baseUrl) {
+    if (!cacheUrl) {
         throw new Error("Cache Service Url not found, unable to restore cache.");
     }
-    core.debug(`Cache Url: ${baseUrl}`);
-    return `${baseUrl}_apis/artifactcache/`;
+    core.debug(`Cache Url: ${cacheUrl}`);
+    return cacheUrl;
 }
 function createAcceptHeader(type, apiVersion) {
     return `${type};api-version=${apiVersion}`;
 }
 function getRequestOptions() {
     const requestOptions = {
-        acceptHeader: createAcceptHeader("application/json", "6.0-preview.1")
+        acceptHeader: createAcceptHeader("application/json", "5.2-preview.1")
     };
     return requestOptions;
 }
-function createRestClient() {
-    const token = process.env["ACTIONS_RUNTIME_TOKEN"] || "";
-    const bearerCredentialHandler = new Handlers_1.BearerCredentialHandler(token);
-    return new RestClient_1.RestClient("actions/cache", getCacheApiUrl(), [
-        bearerCredentialHandler
-    ]);
-}
 function getCacheEntry(keys) {
-    var _a;
     return __awaiter(this, void 0, void 0, function* () {
-        const restClient = createRestClient();
-        const resource = `cache?keys=${encodeURIComponent(keys.join(","))}`;
+        const cacheUrl = getCacheUrl();
+        const token = process.env["ACTIONS_RUNTIME_TOKEN"] || "";
+        const bearerCredentialHandler = new Handlers_1.BearerCredentialHandler(token);
+        const resource = `_apis/artifactcache/cache?keys=${encodeURIComponent(keys.join(","))}`;
+        const restClient = new RestClient_1.RestClient("actions/cache", cacheUrl, [
+            bearerCredentialHandler
+        ]);
        const response = yield restClient.get(resource, getRequestOptions());
         if (response.statusCode === 204) {
             return null;
         }
-        if (!isSuccessStatusCode(response.statusCode)) {
+        if (response.statusCode !== 200) {
             throw new Error(`Cache service responded with ${response.statusCode}`);
         }
         const cacheResult = response.result;
-        const cacheDownloadUrl = (_a = cacheResult) === null || _a === void 0 ? void 0 : _a.archiveLocation;
-        if (!cacheDownloadUrl) {
+        if (!cacheResult || !cacheResult.archiveLocation) {
             throw new Error("Cache not found.");
         }
-        core.setSecret(cacheDownloadUrl);
+        core.setSecret(cacheResult.archiveLocation);
         core.debug(`Cache Result:`);
         core.debug(JSON.stringify(cacheResult));
         return cacheResult;
@@ -1560,102 +1552,34 @@ function pipeResponseToStream(response, stream) {
         });
     });
 }
-function downloadCache(archiveLocation, archivePath) {
+function downloadCache(cacheEntry, archivePath) {
     return __awaiter(this, void 0, void 0, function* () {
         const stream = fs.createWriteStream(archivePath);
         const httpClient = new HttpClient_1.HttpClient("actions/cache");
-        const downloadResponse = yield httpClient.get(archiveLocation);
+        // eslint-disable-next-line @typescript-eslint/no-non-null-assertion
+        const downloadResponse = yield httpClient.get(cacheEntry.archiveLocation);
         yield pipeResponseToStream(downloadResponse, stream);
     });
 }
 exports.downloadCache = downloadCache;
-// Reserve Cache
-function reserveCache(key) {
-    var _a, _b, _c;
+function saveCache(key, archivePath) {
     return __awaiter(this, void 0, void 0, function* () {
-        const restClient = createRestClient();
-        const reserveCacheRequest = {
-            key
-        };
-        const response = yield restClient.create("caches", reserveCacheRequest, getRequestOptions());
-        return _c = (_b = (_a = response) === null || _a === void 0 ? void 0 : _a.result) === null || _b === void 0 ? void 0 : _b.cacheId, (_c !== null && _c !== void 0 ? _c : -1);
-    });
-}
-exports.reserveCache = reserveCache;
-function getContentRange(start, end) {
-    // Format: `bytes start-end/filesize
-    // start and end are inclusive
-    // filesize can be *
-    // For a 200 byte chunk starting at byte 0:
-    // Content-Range: bytes 0-199/*
-    return `bytes ${start}-${end}/*`;
-}
-// function bufferToStream(buffer: Buffer): NodeJS.ReadableStream {
-//     const stream = new Duplex();
-//     stream.push(buffer);
-//     stream.push(null);
-//     return stream;
-// }
-function uploadChunk(restClient, resourceUrl, data, start, end) {
-    return __awaiter(this, void 0, void 0, function* () {
-        core.debug(`Uploading chunk of size ${end - start + 1} bytes at offset ${start} with content range: ${getContentRange(start, end)}`);
+        const stream = fs.createReadStream(archivePath);
+        const cacheUrl = getCacheUrl();
+        const token = process.env["ACTIONS_RUNTIME_TOKEN"] || "";
+        const bearerCredentialHandler = new Handlers_1.BearerCredentialHandler(token);
+        const resource = `_apis/artifactcache/cache/${encodeURIComponent(key)}`;
+        const postUrl = cacheUrl + resource;
+        const restClient = new RestClient_1.RestClient("actions/cache", undefined, [
+            bearerCredentialHandler
+        ]);
         const requestOptions = getRequestOptions();
         requestOptions.additionalHeaders = {
-            "Content-Type": "application/octet-stream",
-            "Content-Range": getContentRange(start, end)
+            "Content-Type": "application/octet-stream"
         };
-        return yield restClient.uploadStream("PATCH", resourceUrl, data, requestOptions);
-    });
-}
+        const response = yield restClient.uploadStream("POST", postUrl, stream, requestOptions);
+        if (response.statusCode !== 200) {
+            throw new Error(`Cache service responded with ${response.statusCode}`);
-function commitCache(restClient, cacheId, filesize) {
-    return __awaiter(this, void 0, void 0, function* () {
-        const requestOptions = getRequestOptions();
-        const commitCacheRequest = { size: filesize };
-        return yield restClient.create(`caches/${cacheId.toString()}`, commitCacheRequest, requestOptions);
-    });
-}
-function uploadFile(restClient, cacheId, archivePath) {
-    return __awaiter(this, void 0, void 0, function* () {
-        // Upload Chunks
-        const fileSize = fs.statSync(archivePath).size;
-        const resourceUrl = getCacheApiUrl() + "caches/" + cacheId.toString();
-        const responses = [];
-        const fd = fs.openSync(archivePath, "r");
-        const concurrency = 16; // # of HTTP requests in parallel
-        const MAX_CHUNK_SIZE = 32000000; // 32 MB Chunks
-        core.debug(`Concurrency: ${concurrency} and Chunk Size: ${MAX_CHUNK_SIZE}`);
-        const parallelUploads = [...new Array(concurrency).keys()];
-        core.debug("Awaiting all uploads");
-        let offset = 0;
-        yield Promise.all(parallelUploads.map(() => __awaiter(this, void 0, void 0, function* () {
-            while (offset < fileSize) {
-                const chunkSize = offset + MAX_CHUNK_SIZE > fileSize ? fileSize - offset : MAX_CHUNK_SIZE;
-                const start = offset;
-                const end = offset + chunkSize - 1;
-                offset += MAX_CHUNK_SIZE;
-                const chunk = fs.createReadStream(archivePath, { fd, start, end, autoClose: false });
-                responses.push(yield uploadChunk(restClient, resourceUrl, chunk, start, end));
-            }
-        })));
-        fs.closeSync(fd);
-        const failedResponse = responses.find(x => !isSuccessStatusCode(x.statusCode));
-        if (failedResponse) {
-            throw new Error(`Cache service responded with ${failedResponse.statusCode} during chunk upload.`);
-        }
-        return;
-    });
-}
-function saveCache(cacheId, archivePath) {
-    return __awaiter(this, void 0, void 0, function* () {
-        const restClient = createRestClient();
-        core.debug("Upload cache");
-        yield uploadFile(restClient, cacheId, archivePath);
-        core.debug("Commiting cache");
-        // Commit Cache
-        const cacheSize = utils.getArchiveFileSize(archivePath);
-        const commitCacheResponse = yield commitCache(restClient, cacheId, cacheSize);
-        if (!isSuccessStatusCode(commitCacheResponse.statusCode)) {
-            throw new Error(`Cache service responded with ${commitCacheResponse.statusCode} during commit cache.`);
         }
         core.info("Cache saved successfully");
     });
@@ -3067,14 +2991,12 @@ var __importStar = (this && this.__importStar) || function (mod) {
 };
 Object.defineProperty(exports, "__esModule", { value: true });
 const core = __importStar(__webpack_require__(470));
-const exec_1 = __webpack_require__(986);
-const io = __importStar(__webpack_require__(1));
 const path = __importStar(__webpack_require__(622));
 const cacheHttpClient = __importStar(__webpack_require__(154));
 const constants_1 = __webpack_require__(694);
+const tar_1 = __webpack_require__(943);
 const utils = __importStar(__webpack_require__(443));
 function run() {
-    var _a, _b;
     return __awaiter(this, void 0, void 0, function* () {
         try {
             // Validate inputs, this can cause task failure
@@ -3112,7 +3034,7 @@ function run() {
             }
             try {
                 const cacheEntry = yield cacheHttpClient.getCacheEntry(keys);
-                if (!cacheEntry || !((_a = cacheEntry) === null || _a === void 0 ? void 0 : _a.archiveLocation)) {
+                if (!cacheEntry) {
                     core.info(`Cache not found for input keys: ${keys.join(", ")}.`);
                     return;
                 }
@@ -3121,28 +3043,10 @@
                 // Store the cache result
                 utils.setCacheState(cacheEntry);
                 // Download the cache from the cache entry
-                yield cacheHttpClient.downloadCache((_b = cacheEntry) === null || _b === void 0 ? void 0 : _b.archiveLocation, archivePath);
-                yield exec_1.exec(`md5sum`, [archivePath]);
+                yield cacheHttpClient.downloadCache(cacheEntry, archivePath);
                 const archiveFileSize = utils.getArchiveFileSize(archivePath);
                 core.info(`Cache Size: ~${Math.round(archiveFileSize / (1024 * 1024))} MB (${archiveFileSize} B)`);
-                // Create directory to extract tar into
-                yield io.mkdirP(cachePath);
-                // http://man7.org/linux/man-pages/man1/tar.1.html
-                // tar [-options] <name of the tar archive> [files or directories which to add into archive]
-                const IS_WINDOWS = process.platform === "win32";
-                const args = IS_WINDOWS
-                    ? [
-                        "-xz",
-                        "--force-local",
-                        "-f",
-                        archivePath.replace(/\\/g, "/"),
-                        "-C",
-                        cachePath.replace(/\\/g, "/")
-                    ]
-                    : ["-xz", "-f", archivePath, "-C", cachePath];
-                const tarPath = yield io.which("tar", true);
-                core.debug(`Tar Path: ${tarPath}`);
-                yield exec_1.exec(`"${tarPath}"`, args);
+                yield tar_1.extractTar(archivePath, cachePath);
                 const isExactKeyMatch = utils.isExactKeyMatch(primaryKey, cacheEntry);
                 utils.setCacheHitOutput(isExactKeyMatch);
                 core.info(`Cache restored from key: ${cacheEntry && cacheEntry.cacheKey}`);
@@ -5238,6 +5142,63 @@ var personalaccesstoken_1 = __webpack_require__(327);
 exports.PersonalAccessTokenCredentialHandler = personalaccesstoken_1.PersonalAccessTokenCredentialHandler;
 
 
+/***/ }),
+
+/***/ 943:
+/***/ (function(__unusedmodule, exports, __webpack_require__) {
+
+"use strict";
+
+var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
+    function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
+    return new (P || (P = Promise))(function (resolve, reject) {
+        function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
+        function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } }
+        function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }
+        step((generator = generator.apply(thisArg, _arguments || [])).next());
+    });
+};
+var __importStar = (this && this.__importStar) || function (mod) {
+    if (mod && mod.__esModule) return mod;
+    var result = {};
+    if (mod != null) for (var k in mod) if (Object.hasOwnProperty.call(mod, k)) result[k] = mod[k];
+    result["default"] = mod;
+    return result;
+};
+Object.defineProperty(exports, "__esModule", { value: true });
+const exec_1 = __webpack_require__(986);
+const io = __importStar(__webpack_require__(1));
+function extractTar(archivePath, targetDirectory) {
+    return __awaiter(this, void 0, void 0, function* () {
+        // Create directory to extract tar into
+        yield io.mkdirP(targetDirectory);
+        // http://man7.org/linux/man-pages/man1/tar.1.html
+        // tar [-options] <name of the tar archive> [files or directories which to add into archive]
+        const args = ["-xz", "-f", archivePath, "-C", targetDirectory];
+        yield exec_1.exec(`"${yield getTarPath()}"`, args);
+    });
+}
+exports.extractTar = extractTar;
+function createTar(archivePath, sourceDirectory) {
+    return __awaiter(this, void 0, void 0, function* () {
+        // http://man7.org/linux/man-pages/man1/tar.1.html
+        // tar [-options] <name of the tar archive> [files or directories which to add into archive]
+        const args = ["-cz", "-f", archivePath, "-C", sourceDirectory, "."];
+        yield exec_1.exec(`"${yield getTarPath()}"`, args);
+    });
+}
+exports.createTar = createTar;
+function getTarPath() {
+    return __awaiter(this, void 0, void 0, function* () {
+        // Explicitly use BSD Tar on Windows
+        const IS_WINDOWS = process.platform === "win32";
+        return IS_WINDOWS
+            ? `${process.env["windir"]}\\System32\\tar.exe`
+            : yield io.which("tar", true);
+    });
+}
+
+
 /***/ }),
 
 /***/ 986:
|  | |||||||

dist/save/index.js (vendored, 235 changes)

@@ -1496,55 +1496,47 @@ const fs = __importStar(__webpack_require__(747));
 const Handlers_1 = __webpack_require__(941);
 const HttpClient_1 = __webpack_require__(874);
 const RestClient_1 = __webpack_require__(105);
-const utils = __importStar(__webpack_require__(443));
-function isSuccessStatusCode(statusCode) {
-    return statusCode >= 200 && statusCode < 300;
-}
-function getCacheApiUrl() {
+function getCacheUrl() {
     // Ideally we just use ACTIONS_CACHE_URL
-    const baseUrl = (process.env["ACTIONS_CACHE_URL"] ||
+    const cacheUrl = (process.env["ACTIONS_CACHE_URL"] ||
         process.env["ACTIONS_RUNTIME_URL"] ||
         "").replace("pipelines", "artifactcache");
-    if (!baseUrl) {
+    if (!cacheUrl) {
         throw new Error("Cache Service Url not found, unable to restore cache.");
     }
-    core.debug(`Cache Url: ${baseUrl}`);
-    return `${baseUrl}_apis/artifactcache/`;
+    core.debug(`Cache Url: ${cacheUrl}`);
+    return cacheUrl;
 }
 function createAcceptHeader(type, apiVersion) {
     return `${type};api-version=${apiVersion}`;
 }
 function getRequestOptions() {
     const requestOptions = {
-        acceptHeader: createAcceptHeader("application/json", "6.0-preview.1")
+        acceptHeader: createAcceptHeader("application/json", "5.2-preview.1")
     };
     return requestOptions;
 }
-function createRestClient() {
-    const token = process.env["ACTIONS_RUNTIME_TOKEN"] || "";
-    const bearerCredentialHandler = new Handlers_1.BearerCredentialHandler(token);
-    return new RestClient_1.RestClient("actions/cache", getCacheApiUrl(), [
-        bearerCredentialHandler
-    ]);
-}
 function getCacheEntry(keys) {
-    var _a;
     return __awaiter(this, void 0, void 0, function* () {
-        const restClient = createRestClient();
-        const resource = `cache?keys=${encodeURIComponent(keys.join(","))}`;
+        const cacheUrl = getCacheUrl();
+        const token = process.env["ACTIONS_RUNTIME_TOKEN"] || "";
+        const bearerCredentialHandler = new Handlers_1.BearerCredentialHandler(token);
+        const resource = `_apis/artifactcache/cache?keys=${encodeURIComponent(keys.join(","))}`;
+        const restClient = new RestClient_1.RestClient("actions/cache", cacheUrl, [
+            bearerCredentialHandler
+        ]);
         const response = yield restClient.get(resource, getRequestOptions());
         if (response.statusCode === 204) {
             return null;
         }
-        if (!isSuccessStatusCode(response.statusCode)) {
+        if (response.statusCode !== 200) {
             throw new Error(`Cache service responded with ${response.statusCode}`);
         }
         const cacheResult = response.result;
-        const cacheDownloadUrl = (_a = cacheResult) === null || _a === void 0 ? void 0 : _a.archiveLocation;
-        if (!cacheDownloadUrl) {
+        if (!cacheResult || !cacheResult.archiveLocation) {
             throw new Error("Cache not found.");
         }
-        core.setSecret(cacheDownloadUrl);
+        core.setSecret(cacheResult.archiveLocation);
         core.debug(`Cache Result:`);
         core.debug(JSON.stringify(cacheResult));
         return cacheResult;
@@ -1560,102 +1552,34 @@ function pipeResponseToStream(response, stream) {
         });
     });
 }
-function downloadCache(archiveLocation, archivePath) {
+function downloadCache(cacheEntry, archivePath) {
     return __awaiter(this, void 0, void 0, function* () {
         const stream = fs.createWriteStream(archivePath);
         const httpClient = new HttpClient_1.HttpClient("actions/cache");
-        const downloadResponse = yield httpClient.get(archiveLocation);
+        // eslint-disable-next-line @typescript-eslint/no-non-null-assertion
+        const downloadResponse = yield httpClient.get(cacheEntry.archiveLocation);
         yield pipeResponseToStream(downloadResponse, stream);
     });
 }
 exports.downloadCache = downloadCache;
-// Reserve Cache
-function reserveCache(key) {
-    var _a, _b, _c;
+function saveCache(key, archivePath) {
     return __awaiter(this, void 0, void 0, function* () {
-        const restClient = createRestClient();
-        const reserveCacheRequest = {
-            key
-        };
-        const response = yield restClient.create("caches", reserveCacheRequest, getRequestOptions());
-        return _c = (_b = (_a = response) === null || _a === void 0 ? void 0 : _a.result) === null || _b === void 0 ? void 0 : _b.cacheId, (_c !== null && _c !== void 0 ? _c : -1);
-    });
-}
-exports.reserveCache = reserveCache;
-function getContentRange(start, end) {
-    // Format: `bytes start-end/filesize
-    // start and end are inclusive
-    // filesize can be *
-    // For a 200 byte chunk starting at byte 0:
-    // Content-Range: bytes 0-199/*
-    return `bytes ${start}-${end}/*`;
-}
-// function bufferToStream(buffer: Buffer): NodeJS.ReadableStream {
-//     const stream = new Duplex();
-//     stream.push(buffer);
-//     stream.push(null);
-//     return stream;
-// }
-function uploadChunk(restClient, resourceUrl, data, start, end) {
-    return __awaiter(this, void 0, void 0, function* () {
-        core.debug(`Uploading chunk of size ${end - start + 1} bytes at offset ${start} with content range: ${getContentRange(start, end)}`);
+        const stream = fs.createReadStream(archivePath);
+        const cacheUrl = getCacheUrl();
+        const token = process.env["ACTIONS_RUNTIME_TOKEN"] || "";
+        const bearerCredentialHandler = new Handlers_1.BearerCredentialHandler(token);
+        const resource = `_apis/artifactcache/cache/${encodeURIComponent(key)}`;
+        const postUrl = cacheUrl + resource;
+        const restClient = new RestClient_1.RestClient("actions/cache", undefined, [
+            bearerCredentialHandler
+        ]);
         const requestOptions = getRequestOptions();
         requestOptions.additionalHeaders = {
-            "Content-Type": "application/octet-stream",
-            "Content-Range": getContentRange(start, end)
+            "Content-Type": "application/octet-stream"
        };
-        return yield restClient.uploadStream("PATCH", resourceUrl, data, requestOptions);
-    });
-}
+        const response = yield restClient.uploadStream("POST", postUrl, stream, requestOptions);
+        if (response.statusCode !== 200) {
+            throw new Error(`Cache service responded with ${response.statusCode}`);
-function commitCache(restClient, cacheId, filesize) {
-    return __awaiter(this, void 0, void 0, function* () {
-        const requestOptions = getRequestOptions();
-        const commitCacheRequest = { size: filesize };
-        return yield restClient.create(`caches/${cacheId.toString()}`, commitCacheRequest, requestOptions);
-    });
-}
-function uploadFile(restClient, cacheId, archivePath) {
-    return __awaiter(this, void 0, void 0, function* () {
-        // Upload Chunks
-        const fileSize = fs.statSync(archivePath).size;
-        const resourceUrl = getCacheApiUrl() + "caches/" + cacheId.toString();
-        const responses = [];
-        const fd = fs.openSync(archivePath, "r");
-        const concurrency = 16; // # of HTTP requests in parallel
-        const MAX_CHUNK_SIZE = 32000000; // 32 MB Chunks
-        core.debug(`Concurrency: ${concurrency} and Chunk Size: ${MAX_CHUNK_SIZE}`);
-        const parallelUploads = [...new Array(concurrency).keys()];
-        core.debug("Awaiting all uploads");
-        let offset = 0;
-        yield Promise.all(parallelUploads.map(() => __awaiter(this, void 0, void 0, function* () {
-            while (offset < fileSize) {
-                const chunkSize = offset + MAX_CHUNK_SIZE > fileSize ? fileSize - offset : MAX_CHUNK_SIZE;
-                const start = offset;
-                const end = offset + chunkSize - 1;
-                offset += MAX_CHUNK_SIZE;
-                const chunk = fs.createReadStream(archivePath, { fd, start, end, autoClose: false });
-                responses.push(yield uploadChunk(restClient, resourceUrl, chunk, start, end));
-            }
-        })));
-        fs.closeSync(fd);
|         const failedResponse = responses.find(x => !isSuccessStatusCode(x.statusCode)); |  | ||||||
|         if (failedResponse) { |  | ||||||
|             throw new Error(`Cache service responded with ${failedResponse.statusCode} during chunk upload.`); |  | ||||||
|         } |  | ||||||
|         return; |  | ||||||
|     }); |  | ||||||
| } |  | ||||||
| function saveCache(cacheId, archivePath) { |  | ||||||
|     return __awaiter(this, void 0, void 0, function* () { |  | ||||||
|         const restClient = createRestClient(); |  | ||||||
|         core.debug("Upload cache"); |  | ||||||
|         yield uploadFile(restClient, cacheId, archivePath); |  | ||||||
|         core.debug("Commiting cache"); |  | ||||||
|         // Commit Cache |  | ||||||
|         const cacheSize = utils.getArchiveFileSize(archivePath); |  | ||||||
|         const commitCacheResponse = yield commitCache(restClient, cacheId, cacheSize); |  | ||||||
|         if (!isSuccessStatusCode(commitCacheResponse.statusCode)) { |  | ||||||
|             throw new Error(`Cache service responded with ${commitCacheResponse.statusCode} during commit cache.`); |  | ||||||
|         } |         } | ||||||
|         core.info("Cache saved successfully"); |         core.info("Cache saved successfully"); | ||||||
|     }); |     }); | ||||||
| @ -2955,11 +2879,10 @@ var __importStar = (this && this.__importStar) || function (mod) { | |||||||
| }; | }; | ||||||
| Object.defineProperty(exports, "__esModule", { value: true }); | Object.defineProperty(exports, "__esModule", { value: true }); | ||||||
| const core = __importStar(__webpack_require__(470)); | const core = __importStar(__webpack_require__(470)); | ||||||
| const exec_1 = __webpack_require__(986); |  | ||||||
| const io = __importStar(__webpack_require__(1)); |  | ||||||
| const path = __importStar(__webpack_require__(622)); | const path = __importStar(__webpack_require__(622)); | ||||||
| const cacheHttpClient = __importStar(__webpack_require__(154)); | const cacheHttpClient = __importStar(__webpack_require__(154)); | ||||||
| const constants_1 = __webpack_require__(694); | const constants_1 = __webpack_require__(694); | ||||||
|  | const tar_1 = __webpack_require__(943); | ||||||
| const utils = __importStar(__webpack_require__(443)); | const utils = __importStar(__webpack_require__(443)); | ||||||
| function run() { | function run() { | ||||||
|     return __awaiter(this, void 0, void 0, function* () { |     return __awaiter(this, void 0, void 0, function* () { | ||||||
| @ -2981,44 +2904,19 @@ function run() { | |||||||
|                 core.info(`Cache hit occurred on the primary key ${primaryKey}, not saving cache.`); |                 core.info(`Cache hit occurred on the primary key ${primaryKey}, not saving cache.`); | ||||||
|                 return; |                 return; | ||||||
|             } |             } | ||||||
|             core.debug("Reserving Cache"); |  | ||||||
|             const cacheId = yield cacheHttpClient.reserveCache(primaryKey); |  | ||||||
|             if (cacheId < 0) { |  | ||||||
|                 core.info(`Unable to reserve cache with key ${primaryKey}, another job may be creating this cache.`); |  | ||||||
|                 return; |  | ||||||
|             } |  | ||||||
|             core.debug(`Cache ID: ${cacheId}`); |  | ||||||
|             const cachePath = utils.resolvePath(core.getInput(constants_1.Inputs.Path, { required: true })); |             const cachePath = utils.resolvePath(core.getInput(constants_1.Inputs.Path, { required: true })); | ||||||
|             core.debug(`Cache Path: ${cachePath}`); |             core.debug(`Cache Path: ${cachePath}`); | ||||||
|             const archivePath = path.join(yield utils.createTempDirectory(), "cache.tgz"); |             const archivePath = path.join(yield utils.createTempDirectory(), "cache.tgz"); | ||||||
|             core.debug(`Archive Path: ${archivePath}`); |             core.debug(`Archive Path: ${archivePath}`); | ||||||
|             // http://man7.org/linux/man-pages/man1/tar.1.html |             yield tar_1.createTar(archivePath, cachePath); | ||||||
|             // tar [-options] <name of the tar archive> [files or directories to add to the archive] |             const fileSizeLimit = 400 * 1024 * 1024; // 400MB | ||||||
|             const IS_WINDOWS = process.platform === "win32"; |  | ||||||
|             const args = IS_WINDOWS |  | ||||||
|                 ? [ |  | ||||||
|                     "-cz", |  | ||||||
|                     "--force-local", |  | ||||||
|                     "-f", |  | ||||||
|                     archivePath.replace(/\\/g, "/"), |  | ||||||
|                     "-C", |  | ||||||
|                     cachePath.replace(/\\/g, "/"), |  | ||||||
|                     "." |  | ||||||
|                 ] |  | ||||||
|                 : ["-cz", "-f", archivePath, "-C", cachePath, "."]; |  | ||||||
|             const tarPath = yield io.which("tar", true); |  | ||||||
|             core.debug(`Tar Path: ${tarPath}`); |  | ||||||
|             yield exec_1.exec(`"${tarPath}"`, args); |  | ||||||
|             const fileSizeLimit = 2 * 1024 * 1024 * 1024; // 2GB per repo limit |  | ||||||
|             const archiveFileSize = utils.getArchiveFileSize(archivePath); |             const archiveFileSize = utils.getArchiveFileSize(archivePath); | ||||||
|             core.debug(`File Size: ${archiveFileSize}`); |             core.debug(`File Size: ${archiveFileSize}`); | ||||||
|             if (archiveFileSize > fileSizeLimit) { |             if (archiveFileSize > fileSizeLimit) { | ||||||
|                 utils.logWarning(`Cache size of ~${Math.round(archiveFileSize / (1024 * 1024 * 1024))} GB (${archiveFileSize} B) is over the 2GB limit, not saving cache.`); |                 utils.logWarning(`Cache size of ~${Math.round(archiveFileSize / (1024 * 1024))} MB (${archiveFileSize} B) is over the 400MB limit, not saving cache.`); | ||||||
|                 return; |                 return; | ||||||
|             } |             } | ||||||
|             yield exec_1.exec(`md5sum`, [archivePath]); |             yield cacheHttpClient.saveCache(primaryKey, archivePath); | ||||||
|             core.debug("Saving Cache"); |  | ||||||
|             yield cacheHttpClient.saveCache(cacheId, archivePath); |  | ||||||
|         } |         } | ||||||
|         catch (error) { |         catch (error) { | ||||||
|             utils.logWarning(error.message); |             utils.logWarning(error.message); | ||||||
| @ -5218,6 +5116,63 @@ var personalaccesstoken_1 = __webpack_require__(327); | |||||||
| exports.PersonalAccessTokenCredentialHandler = personalaccesstoken_1.PersonalAccessTokenCredentialHandler; | exports.PersonalAccessTokenCredentialHandler = personalaccesstoken_1.PersonalAccessTokenCredentialHandler; | ||||||
|  |  | ||||||
|  |  | ||||||
|  | /***/ }), | ||||||
|  |  | ||||||
|  | /***/ 943: | ||||||
|  | /***/ (function(__unusedmodule, exports, __webpack_require__) { | ||||||
|  |  | ||||||
|  | "use strict"; | ||||||
|  |  | ||||||
|  | var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { | ||||||
|  |     function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); } | ||||||
|  |     return new (P || (P = Promise))(function (resolve, reject) { | ||||||
|  |         function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } | ||||||
|  |         function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } | ||||||
|  |         function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } | ||||||
|  |         step((generator = generator.apply(thisArg, _arguments || [])).next()); | ||||||
|  |     }); | ||||||
|  | }; | ||||||
|  | var __importStar = (this && this.__importStar) || function (mod) { | ||||||
|  |     if (mod && mod.__esModule) return mod; | ||||||
|  |     var result = {}; | ||||||
|  |     if (mod != null) for (var k in mod) if (Object.hasOwnProperty.call(mod, k)) result[k] = mod[k]; | ||||||
|  |     result["default"] = mod; | ||||||
|  |     return result; | ||||||
|  | }; | ||||||
|  | Object.defineProperty(exports, "__esModule", { value: true }); | ||||||
|  | const exec_1 = __webpack_require__(986); | ||||||
|  | const io = __importStar(__webpack_require__(1)); | ||||||
|  | function extractTar(archivePath, targetDirectory) { | ||||||
|  |     return __awaiter(this, void 0, void 0, function* () { | ||||||
|  |         // Create directory to extract tar into | ||||||
|  |         yield io.mkdirP(targetDirectory); | ||||||
|  |         // http://man7.org/linux/man-pages/man1/tar.1.html | ||||||
|  |         // tar [-options] <name of the tar archive> [files or directories to add to the archive] | ||||||
|  |         const args = ["-xz", "-f", archivePath, "-C", targetDirectory]; | ||||||
|  |         yield exec_1.exec(`"${yield getTarPath()}"`, args); | ||||||
|  |     }); | ||||||
|  | } | ||||||
|  | exports.extractTar = extractTar; | ||||||
|  | function createTar(archivePath, sourceDirectory) { | ||||||
|  |     return __awaiter(this, void 0, void 0, function* () { | ||||||
|  |         // http://man7.org/linux/man-pages/man1/tar.1.html | ||||||
|  |         // tar [-options] <name of the tar archive> [files or directories to add to the archive] | ||||||
|  |         const args = ["-cz", "-f", archivePath, "-C", sourceDirectory, "."]; | ||||||
|  |         yield exec_1.exec(`"${yield getTarPath()}"`, args); | ||||||
|  |     }); | ||||||
|  | } | ||||||
|  | exports.createTar = createTar; | ||||||
|  | function getTarPath() { | ||||||
|  |     return __awaiter(this, void 0, void 0, function* () { | ||||||
|  |         // Explicitly use BSD Tar on Windows | ||||||
|  |         const IS_WINDOWS = process.platform === "win32"; | ||||||
|  |         return IS_WINDOWS | ||||||
|  |             ? `${process.env["windir"]}\\System32\\tar.exe` | ||||||
|  |             : yield io.which("tar", true); | ||||||
|  |     }); | ||||||
|  | } | ||||||
|  |  | ||||||
|  |  | ||||||
| /***/ }), | /***/ }), | ||||||
|  |  | ||||||
| /***/ 986: | /***/ 986: | ||||||
|  | |||||||
12 package-lock.json (generated)
| @ -4859,9 +4859,9 @@ | |||||||
|       "dev": true |       "dev": true | ||||||
|     }, |     }, | ||||||
|     "prettier": { |     "prettier": { | ||||||
|       "version": "1.19.1", |       "version": "1.18.2", | ||||||
|       "resolved": "https://registry.npmjs.org/prettier/-/prettier-1.19.1.tgz", |       "resolved": "https://registry.npmjs.org/prettier/-/prettier-1.18.2.tgz", | ||||||
|       "integrity": "sha512-s7PoyDv/II1ObgQunCbB9PdLmUcBZcnWOcxDh7O0N/UwDEsHyqkW+Qh28jW+mVuCdx7gLB0BotYI1Y6uI9iyew==", |       "integrity": "sha512-OeHeMc0JhFE9idD4ZdtNibzY0+TPHSpSSb9h8FqtP+YnoZZ1sl8Vc9b1sasjfymH3SonAF4QcA2+mzHPhMvIiw==", | ||||||
|       "dev": true |       "dev": true | ||||||
|     }, |     }, | ||||||
|     "prettier-linter-helpers": { |     "prettier-linter-helpers": { | ||||||
| @ -5983,9 +5983,9 @@ | |||||||
|       } |       } | ||||||
|     }, |     }, | ||||||
|     "typescript": { |     "typescript": { | ||||||
|       "version": "3.7.3", |       "version": "3.6.4", | ||||||
|       "resolved": "https://registry.npmjs.org/typescript/-/typescript-3.7.3.tgz", |       "resolved": "https://registry.npmjs.org/typescript/-/typescript-3.6.4.tgz", | ||||||
|       "integrity": "sha512-Mcr/Qk7hXqFBXMN7p7Lusj1ktCBydylfQM/FZCk5glCNQJrCUKPkMHdo9R0MTFWsC/4kPFvDS0fDPvukfCkFsw==", |       "integrity": "sha512-unoCll1+l+YK4i4F8f22TaNVPRHcD9PA3yCuZ8g5e0qGqlVlJ/8FSateOLLSagn+Yg5+ZwuPkL8LFUc0Jcvksg==", | ||||||
|       "dev": true |       "dev": true | ||||||
|     }, |     }, | ||||||
|     "uglify-js": { |     "uglify-js": { | ||||||
|  | |||||||
| @ -1,6 +1,6 @@ | |||||||
| { | { | ||||||
|   "name": "cache", |   "name": "cache", | ||||||
|   "version": "1.1.0", |   "version": "1.0.3", | ||||||
|   "private": true, |   "private": true, | ||||||
|   "description": "Cache dependencies and build outputs", |   "description": "Cache dependencies and build outputs", | ||||||
|   "main": "dist/restore/index.js", |   "main": "dist/restore/index.js", | ||||||
| @ -46,8 +46,8 @@ | |||||||
|     "jest": "^24.8.0", |     "jest": "^24.8.0", | ||||||
|     "jest-circus": "^24.7.1", |     "jest-circus": "^24.7.1", | ||||||
|     "nock": "^11.7.0", |     "nock": "^11.7.0", | ||||||
|     "prettier": "^1.19.1", |     "prettier": "1.18.2", | ||||||
|     "ts-jest": "^24.0.2", |     "ts-jest": "^24.0.2", | ||||||
|     "typescript": "^3.7.3" |     "typescript": "^3.6.4" | ||||||
|   } |   } | ||||||
| } | } | ||||||
|  | |||||||
| @ -3,37 +3,24 @@ import * as fs from "fs"; | |||||||
| import { BearerCredentialHandler } from "typed-rest-client/Handlers"; | import { BearerCredentialHandler } from "typed-rest-client/Handlers"; | ||||||
| import { HttpClient } from "typed-rest-client/HttpClient"; | import { HttpClient } from "typed-rest-client/HttpClient"; | ||||||
| import { IHttpClientResponse } from "typed-rest-client/Interfaces"; | import { IHttpClientResponse } from "typed-rest-client/Interfaces"; | ||||||
| import { | import { IRequestOptions, RestClient } from "typed-rest-client/RestClient"; | ||||||
|     IRequestOptions, | import { ArtifactCacheEntry } from "./contracts"; | ||||||
|     RestClient, |  | ||||||
|     IRestResponse |  | ||||||
| } from "typed-rest-client/RestClient"; |  | ||||||
| import { |  | ||||||
|     ArtifactCacheEntry, |  | ||||||
|     CommitCacheRequest, |  | ||||||
|     ReserveCacheRequest, |  | ||||||
|     ReserverCacheResponse |  | ||||||
| } from "./contracts"; |  | ||||||
| import * as utils from "./utils/actionUtils"; |  | ||||||
|  |  | ||||||
| function isSuccessStatusCode(statusCode: number): boolean { | function getCacheUrl(): string { | ||||||
|     return statusCode >= 200 && statusCode < 300; |  | ||||||
| } |  | ||||||
| function getCacheApiUrl(): string { |  | ||||||
|     // Ideally we just use ACTIONS_CACHE_URL |     // Ideally we just use ACTIONS_CACHE_URL | ||||||
|     const baseUrl: string = ( |     const cacheUrl: string = ( | ||||||
|         process.env["ACTIONS_CACHE_URL"] || |         process.env["ACTIONS_CACHE_URL"] || | ||||||
|         process.env["ACTIONS_RUNTIME_URL"] || |         process.env["ACTIONS_RUNTIME_URL"] || | ||||||
|         "" |         "" | ||||||
|     ).replace("pipelines", "artifactcache"); |     ).replace("pipelines", "artifactcache"); | ||||||
|     if (!baseUrl) { |     if (!cacheUrl) { | ||||||
|         throw new Error( |         throw new Error( | ||||||
|             "Cache Service Url not found, unable to restore cache." |             "Cache Service Url not found, unable to restore cache." | ||||||
|         ); |         ); | ||||||
|     } |     } | ||||||
|  |  | ||||||
|     core.debug(`Cache Url: ${baseUrl}`); |     core.debug(`Cache Url: ${cacheUrl}`); | ||||||
|     return `${baseUrl}_apis/artifactcache/`; |     return cacheUrl; | ||||||
| } | } | ||||||
|  |  | ||||||
| function createAcceptHeader(type: string, apiVersion: string): string { | function createAcceptHeader(type: string, apiVersion: string): string { | ||||||
| @ -42,26 +29,26 @@ function createAcceptHeader(type: string, apiVersion: string): string { | |||||||
|  |  | ||||||
| function getRequestOptions(): IRequestOptions { | function getRequestOptions(): IRequestOptions { | ||||||
|     const requestOptions: IRequestOptions = { |     const requestOptions: IRequestOptions = { | ||||||
|         acceptHeader: createAcceptHeader("application/json", "6.0-preview.1") |         acceptHeader: createAcceptHeader("application/json", "5.2-preview.1") | ||||||
|     }; |     }; | ||||||
|  |  | ||||||
|     return requestOptions; |     return requestOptions; | ||||||
| } | } | ||||||
|  |  | ||||||
| function createRestClient(): RestClient { |  | ||||||
|     const token = process.env["ACTIONS_RUNTIME_TOKEN"] || ""; |  | ||||||
|     const bearerCredentialHandler = new BearerCredentialHandler(token); |  | ||||||
|  |  | ||||||
|     return new RestClient("actions/cache", getCacheApiUrl(), [ |  | ||||||
|         bearerCredentialHandler |  | ||||||
|     ]); |  | ||||||
| } |  | ||||||
|  |  | ||||||
| export async function getCacheEntry( | export async function getCacheEntry( | ||||||
|     keys: string[] |     keys: string[] | ||||||
| ): Promise<ArtifactCacheEntry | null> { | ): Promise<ArtifactCacheEntry | null> { | ||||||
|     const restClient = createRestClient(); |     const cacheUrl = getCacheUrl(); | ||||||
|     const resource = `cache?keys=${encodeURIComponent(keys.join(","))}`; |     const token = process.env["ACTIONS_RUNTIME_TOKEN"] || ""; | ||||||
|  |     const bearerCredentialHandler = new BearerCredentialHandler(token); | ||||||
|  |  | ||||||
|  |     const resource = `_apis/artifactcache/cache?keys=${encodeURIComponent( | ||||||
|  |         keys.join(",") | ||||||
|  |     )}`; | ||||||
|  |  | ||||||
|  |     const restClient = new RestClient("actions/cache", cacheUrl, [ | ||||||
|  |         bearerCredentialHandler | ||||||
|  |     ]); | ||||||
|  |  | ||||||
|     const response = await restClient.get<ArtifactCacheEntry>( |     const response = await restClient.get<ArtifactCacheEntry>( | ||||||
|         resource, |         resource, | ||||||
| @ -70,15 +57,14 @@ export async function getCacheEntry( | |||||||
|     if (response.statusCode === 204) { |     if (response.statusCode === 204) { | ||||||
|         return null; |         return null; | ||||||
|     } |     } | ||||||
|     if (!isSuccessStatusCode(response.statusCode)) { |     if (response.statusCode !== 200) { | ||||||
|         throw new Error(`Cache service responded with ${response.statusCode}`); |         throw new Error(`Cache service responded with ${response.statusCode}`); | ||||||
|     } |     } | ||||||
|     const cacheResult = response.result; |     const cacheResult = response.result; | ||||||
|     const cacheDownloadUrl = cacheResult?.archiveLocation; |     if (!cacheResult || !cacheResult.archiveLocation) { | ||||||
|     if (!cacheDownloadUrl) { |  | ||||||
|         throw new Error("Cache not found."); |         throw new Error("Cache not found."); | ||||||
|     } |     } | ||||||
|     core.setSecret(cacheDownloadUrl); |     core.setSecret(cacheResult.archiveLocation); | ||||||
|     core.debug(`Cache Result:`); |     core.debug(`Cache Result:`); | ||||||
|     core.debug(JSON.stringify(cacheResult)); |     core.debug(JSON.stringify(cacheResult)); | ||||||
|  |  | ||||||
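A note on the lookup contract above: the rewritten `getCacheEntry` returns `null` only for a 204 (no entry under any key), throws `Cache not found.` when a 200 body lacks `archiveLocation`, and throws with the status code for anything else. A minimal consumer sketch under those assumptions (the `lookup` name and the logging are illustrative):

    import * as cacheHttpClient from "./cacheHttpClient";

    async function lookup(keys: string[]): Promise<void> {
        try {
            const entry = await cacheHttpClient.getCacheEntry(keys);
            if (!entry) {
                // Service answered 204: nothing cached under these keys.
                return;
            }
            // Reaching here implies entry.archiveLocation is set;
            // getCacheEntry throws "Cache not found." otherwise.
            console.log(JSON.stringify(entry));
        } catch (error) {
            console.log(error.message);
        }
    }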
| @ -97,165 +83,46 @@ async function pipeResponseToStream( | |||||||
| } | } | ||||||
|  |  | ||||||
| export async function downloadCache( | export async function downloadCache( | ||||||
|     archiveLocation: string, |     cacheEntry: ArtifactCacheEntry, | ||||||
|     archivePath: string |     archivePath: string | ||||||
| ): Promise<void> { | ): Promise<void> { | ||||||
|     const stream = fs.createWriteStream(archivePath); |     const stream = fs.createWriteStream(archivePath); | ||||||
|     const httpClient = new HttpClient("actions/cache"); |     const httpClient = new HttpClient("actions/cache"); | ||||||
|     const downloadResponse = await httpClient.get(archiveLocation); |     // eslint-disable-next-line @typescript-eslint/no-non-null-assertion | ||||||
|  |     const downloadResponse = await httpClient.get(cacheEntry.archiveLocation!); | ||||||
|     await pipeResponseToStream(downloadResponse, stream); |     await pipeResponseToStream(downloadResponse, stream); | ||||||
| } | } | ||||||
|  |  | ||||||
| // Reserve Cache | export async function saveCache( | ||||||
| export async function reserveCache(key: string): Promise<number> { |     key: string, | ||||||
|     const restClient = createRestClient(); |     archivePath: string | ||||||
|  | ): Promise<void> { | ||||||
|  |     const stream = fs.createReadStream(archivePath); | ||||||
|  |  | ||||||
|     const reserveCacheRequest: ReserveCacheRequest = { |     const cacheUrl = getCacheUrl(); | ||||||
|         key |     const token = process.env["ACTIONS_RUNTIME_TOKEN"] || ""; | ||||||
|     }; |     const bearerCredentialHandler = new BearerCredentialHandler(token); | ||||||
|     const response = await restClient.create<ReserverCacheResponse>( |  | ||||||
|         "caches", |  | ||||||
|         reserveCacheRequest, |  | ||||||
|         getRequestOptions() |  | ||||||
|     ); |  | ||||||
|  |  | ||||||
|     return response?.result?.cacheId ?? -1; |     const resource = `_apis/artifactcache/cache/${encodeURIComponent(key)}`; | ||||||
| } |     const postUrl = cacheUrl + resource; | ||||||
|  |  | ||||||
| function getContentRange(start: number, end: number): string { |     const restClient = new RestClient("actions/cache", undefined, [ | ||||||
|     // Format: `bytes start-end/filesize` |         bearerCredentialHandler | ||||||
|     // start and end are inclusive |     ]); | ||||||
|     // filesize can be * |  | ||||||
|     // For a 200 byte chunk starting at byte 0: |  | ||||||
|     // Content-Range: bytes 0-199/* |  | ||||||
|     return `bytes ${start}-${end}/*`; |  | ||||||
| } |  | ||||||
|  |  | ||||||
| async function uploadChunk( |  | ||||||
|     restClient: RestClient, |  | ||||||
|     resourceUrl: string, |  | ||||||
|     data: NodeJS.ReadableStream, |  | ||||||
|     start: number, |  | ||||||
|     end: number |  | ||||||
| ): Promise<IRestResponse<void>> { |  | ||||||
|     core.debug( |  | ||||||
|         `Uploading chunk of size ${end - |  | ||||||
|             start + |  | ||||||
|             1} bytes at offset ${start} with content range: ${getContentRange( |  | ||||||
|             start, |  | ||||||
|             end |  | ||||||
|         )}` |  | ||||||
|     ); |  | ||||||
|     const requestOptions = getRequestOptions(); |     const requestOptions = getRequestOptions(); | ||||||
|     requestOptions.additionalHeaders = { |     requestOptions.additionalHeaders = { | ||||||
|         "Content-Type": "application/octet-stream", |         "Content-Type": "application/octet-stream" | ||||||
|         "Content-Range": getContentRange(start, end) |  | ||||||
|     }; |     }; | ||||||
|  |  | ||||||
|     return await restClient.uploadStream<void>( |     const response = await restClient.uploadStream<void>( | ||||||
|         "PATCH", |         "POST", | ||||||
|         resourceUrl, |         postUrl, | ||||||
|         data, |         stream, | ||||||
|         requestOptions |         requestOptions | ||||||
|     ); |     ); | ||||||
| } |     if (response.statusCode !== 200) { | ||||||
|  |         throw new Error(`Cache service responded with ${response.statusCode}`); | ||||||
| async function uploadFile( |  | ||||||
|     restClient: RestClient, |  | ||||||
|     cacheId: number, |  | ||||||
|     archivePath: string |  | ||||||
| ): Promise<void> { |  | ||||||
|     // Upload Chunks |  | ||||||
|     const fileSize = fs.statSync(archivePath).size; |  | ||||||
|     const resourceUrl = getCacheApiUrl() + "caches/" + cacheId.toString(); |  | ||||||
|     const responses: IRestResponse<void>[] = []; |  | ||||||
|     const fd = fs.openSync(archivePath, "r"); |  | ||||||
|  |  | ||||||
|     const concurrency = 4; // # of HTTP requests in parallel |  | ||||||
|     const MAX_CHUNK_SIZE = 32000000; // 32 MB Chunks |  | ||||||
|     core.debug(`Concurrency: ${concurrency} and Chunk Size: ${MAX_CHUNK_SIZE}`); |  | ||||||
|  |  | ||||||
|     const parallelUploads = [...new Array(concurrency).keys()]; |  | ||||||
|     core.debug("Awaiting all uploads"); |  | ||||||
|     let offset = 0; |  | ||||||
|     await Promise.all( |  | ||||||
|         parallelUploads.map(async () => { |  | ||||||
|             while (offset < fileSize) { |  | ||||||
|                 const chunkSize = |  | ||||||
|                     offset + MAX_CHUNK_SIZE > fileSize |  | ||||||
|                         ? fileSize - offset |  | ||||||
|                         : MAX_CHUNK_SIZE; |  | ||||||
|                 const start = offset; |  | ||||||
|                 const end = offset + chunkSize - 1; |  | ||||||
|                 offset += MAX_CHUNK_SIZE; |  | ||||||
|                 const chunk = fs.createReadStream(archivePath, { |  | ||||||
|                     fd, |  | ||||||
|                     start, |  | ||||||
|                     end, |  | ||||||
|                     autoClose: false |  | ||||||
|                 }); |  | ||||||
|                 responses.push( |  | ||||||
|                     await uploadChunk( |  | ||||||
|                         restClient, |  | ||||||
|                         resourceUrl, |  | ||||||
|                         chunk, |  | ||||||
|                         start, |  | ||||||
|                         end |  | ||||||
|                     ) |  | ||||||
|                 ); |  | ||||||
|             } |  | ||||||
|         }) |  | ||||||
|     ); |  | ||||||
|  |  | ||||||
|     fs.closeSync(fd); |  | ||||||
|  |  | ||||||
|     const failedResponse = responses.find( |  | ||||||
|         x => !isSuccessStatusCode(x.statusCode) |  | ||||||
|     ); |  | ||||||
|     if (failedResponse) { |  | ||||||
|         throw new Error( |  | ||||||
|             `Cache service responded with ${failedResponse.statusCode} during chunk upload.` |  | ||||||
|         ); |  | ||||||
|     } |  | ||||||
|  |  | ||||||
|     return; |  | ||||||
| } |  | ||||||
|  |  | ||||||
| async function commitCache( |  | ||||||
|     restClient: RestClient, |  | ||||||
|     cacheId: number, |  | ||||||
|     filesize: number |  | ||||||
| ): Promise<IRestResponse<void>> { |  | ||||||
|     const requestOptions = getRequestOptions(); |  | ||||||
|     const commitCacheRequest: CommitCacheRequest = { size: filesize }; |  | ||||||
|     return await restClient.create( |  | ||||||
|         `caches/${cacheId.toString()}`, |  | ||||||
|         commitCacheRequest, |  | ||||||
|         requestOptions |  | ||||||
|     ); |  | ||||||
| } |  | ||||||
|  |  | ||||||
| export async function saveCache( |  | ||||||
|     cacheId: number, |  | ||||||
|     archivePath: string |  | ||||||
| ): Promise<void> { |  | ||||||
|     const restClient = createRestClient(); |  | ||||||
|  |  | ||||||
|     core.debug("Upload cache"); |  | ||||||
|     await uploadFile(restClient, cacheId, archivePath); |  | ||||||
|  |  | ||||||
|     // Commit Cache |  | ||||||
|     core.debug("Commiting cache"); |  | ||||||
|     const cacheSize = utils.getArchiveFileSize(archivePath); |  | ||||||
|     const commitCacheResponse = await commitCache( |  | ||||||
|         restClient, |  | ||||||
|         cacheId, |  | ||||||
|         cacheSize |  | ||||||
|     ); |  | ||||||
|     if (!isSuccessStatusCode(commitCacheResponse.statusCode)) { |  | ||||||
|         throw new Error( |  | ||||||
|             `Cache service responded with ${commitCacheResponse.statusCode} during commit cache.` |  | ||||||
|         ); |  | ||||||
|     } |     } | ||||||
|  |  | ||||||
|     core.info("Cache saved successfully"); |     core.info("Cache saved successfully"); | ||||||
13 src/contracts.d.ts (vendored)
| @ -4,16 +4,3 @@ export interface ArtifactCacheEntry { | |||||||
|     creationTime?: string; |     creationTime?: string; | ||||||
|     archiveLocation?: string; |     archiveLocation?: string; | ||||||
| } | } | ||||||
|  |  | ||||||
| export interface CommitCacheRequest { |  | ||||||
|     size: number; |  | ||||||
| } |  | ||||||
|  |  | ||||||
| export interface ReserveCacheRequest { |  | ||||||
|     key: string; |  | ||||||
|     version?: string; |  | ||||||
| } |  | ||||||
|  |  | ||||||
| export interface ReserverCacheResponse { |  | ||||||
|     cacheId: number; |  | ||||||
| } |  | ||||||
|  | |||||||
| @ -1,9 +1,8 @@ | |||||||
| import * as core from "@actions/core"; | import * as core from "@actions/core"; | ||||||
| import { exec } from "@actions/exec"; |  | ||||||
| import * as io from "@actions/io"; |  | ||||||
| import * as path from "path"; | import * as path from "path"; | ||||||
| import * as cacheHttpClient from "./cacheHttpClient"; | import * as cacheHttpClient from "./cacheHttpClient"; | ||||||
| import { Events, Inputs, State } from "./constants"; | import { Events, Inputs, State } from "./constants"; | ||||||
|  | import { extractTar } from "./tar"; | ||||||
| import * as utils from "./utils/actionUtils"; | import * as utils from "./utils/actionUtils"; | ||||||
|  |  | ||||||
| async function run(): Promise<void> { | async function run(): Promise<void> { | ||||||
| @ -61,7 +60,7 @@ async function run(): Promise<void> { | |||||||
|  |  | ||||||
|         try { |         try { | ||||||
|             const cacheEntry = await cacheHttpClient.getCacheEntry(keys); |             const cacheEntry = await cacheHttpClient.getCacheEntry(keys); | ||||||
|             if (!cacheEntry || !cacheEntry?.archiveLocation) { |             if (!cacheEntry) { | ||||||
|                 core.info( |                 core.info( | ||||||
|                     `Cache not found for input keys: ${keys.join(", ")}.` |                     `Cache not found for input keys: ${keys.join(", ")}.` | ||||||
|                 ); |                 ); | ||||||
| @ -78,10 +77,7 @@ async function run(): Promise<void> { | |||||||
|             utils.setCacheState(cacheEntry); |             utils.setCacheState(cacheEntry); | ||||||
|  |  | ||||||
|             // Download the cache from the cache entry |             // Download the cache from the cache entry | ||||||
|             await cacheHttpClient.downloadCache( |             await cacheHttpClient.downloadCache(cacheEntry, archivePath); | ||||||
|                 cacheEntry?.archiveLocation, |  | ||||||
|                 archivePath |  | ||||||
|             ); |  | ||||||
|  |  | ||||||
|             const archiveFileSize = utils.getArchiveFileSize(archivePath); |             const archiveFileSize = utils.getArchiveFileSize(archivePath); | ||||||
|             core.info( |             core.info( | ||||||
| @ -90,27 +86,7 @@ async function run(): Promise<void> { | |||||||
|                 )} MB (${archiveFileSize} B)` |                 )} MB (${archiveFileSize} B)` | ||||||
|             ); |             ); | ||||||
|  |  | ||||||
|             // Create directory to extract tar into |             await extractTar(archivePath, cachePath); | ||||||
|             await io.mkdirP(cachePath); |  | ||||||
|  |  | ||||||
|             // http://man7.org/linux/man-pages/man1/tar.1.html |  | ||||||
|             // tar [-options] <name of the tar archive> [files or directories to add to the archive] |  | ||||||
|             const IS_WINDOWS = process.platform === "win32"; |  | ||||||
|             const args = IS_WINDOWS |  | ||||||
|                 ? [ |  | ||||||
|                       "-xz", |  | ||||||
|                       "--force-local", |  | ||||||
|                       "-f", |  | ||||||
|                       archivePath.replace(/\\/g, "/"), |  | ||||||
|                       "-C", |  | ||||||
|                       cachePath.replace(/\\/g, "/") |  | ||||||
|                   ] |  | ||||||
|                 : ["-xz", "-f", archivePath, "-C", cachePath]; |  | ||||||
|  |  | ||||||
|             const tarPath = await io.which("tar", true); |  | ||||||
|             core.debug(`Tar Path: ${tarPath}`); |  | ||||||
|  |  | ||||||
|             await exec(`"${tarPath}"`, args); |  | ||||||
|  |  | ||||||
|             const isExactKeyMatch = utils.isExactKeyMatch( |             const isExactKeyMatch = utils.isExactKeyMatch( | ||||||
|                 primaryKey, |                 primaryKey, | ||||||
|  | |||||||
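Two shapes change in restore.ts here: `downloadCache` now takes the whole `ArtifactCacheEntry` and dereferences `archiveLocation!` internally (safe only because `getCacheEntry` throws when that field is absent), and the inline `mkdirP` plus `tar -xz` invocation collapses into `extractTar`. A condensed sketch of the resulting flow, assuming the same imports as the file above (`restoreSketch` is an illustrative name):

    async function restoreSketch(
        keys: string[],
        cachePath: string,
        archivePath: string
    ): Promise<void> {
        const cacheEntry = await cacheHttpClient.getCacheEntry(keys);
        if (!cacheEntry) {
            return; // no cache under any of the input keys
        }
        // The entry itself, not a bare URL, is handed to the download step.
        await cacheHttpClient.downloadCache(cacheEntry, archivePath);
        // Directory creation and tar extraction both live behind extractTar now.
        await extractTar(archivePath, cachePath);
    }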
40 src/save.ts
| @ -1,9 +1,8 @@ | |||||||
| import * as core from "@actions/core"; | import * as core from "@actions/core"; | ||||||
| import { exec } from "@actions/exec"; |  | ||||||
| import * as io from "@actions/io"; |  | ||||||
| import * as path from "path"; | import * as path from "path"; | ||||||
| import * as cacheHttpClient from "./cacheHttpClient"; | import * as cacheHttpClient from "./cacheHttpClient"; | ||||||
| import { Events, Inputs, State } from "./constants"; | import { Events, Inputs, State } from "./constants"; | ||||||
|  | import { createTar } from "./tar"; | ||||||
| import * as utils from "./utils/actionUtils"; | import * as utils from "./utils/actionUtils"; | ||||||
|  |  | ||||||
| async function run(): Promise<void> { | async function run(): Promise<void> { | ||||||
| @ -35,15 +34,6 @@ async function run(): Promise<void> { | |||||||
|             return; |             return; | ||||||
|         } |         } | ||||||
|  |  | ||||||
|         core.debug("Reserving Cache"); |  | ||||||
|         const cacheId = await cacheHttpClient.reserveCache(primaryKey); |  | ||||||
|         if (cacheId < 0) { |  | ||||||
|             core.info( |  | ||||||
|                 `Unable to reserve cache with key ${primaryKey}, another job may be creating this cache.` |  | ||||||
|             ); |  | ||||||
|             return; |  | ||||||
|         } |  | ||||||
|         core.debug(`Cache ID: ${cacheId}`); |  | ||||||
|         const cachePath = utils.resolvePath( |         const cachePath = utils.resolvePath( | ||||||
|             core.getInput(Inputs.Path, { required: true }) |             core.getInput(Inputs.Path, { required: true }) | ||||||
|         ); |         ); | ||||||
| @ -55,39 +45,21 @@ async function run(): Promise<void> { | |||||||
|         ); |         ); | ||||||
|         core.debug(`Archive Path: ${archivePath}`); |         core.debug(`Archive Path: ${archivePath}`); | ||||||
|  |  | ||||||
|         // http://man7.org/linux/man-pages/man1/tar.1.html |         await createTar(archivePath, cachePath); | ||||||
|         // tar [-options] <name of the tar archive> [files or directories to add to the archive] |  | ||||||
|         const IS_WINDOWS = process.platform === "win32"; |  | ||||||
|         const args = IS_WINDOWS |  | ||||||
|             ? [ |  | ||||||
|                   "-cz", |  | ||||||
|                   "--force-local", |  | ||||||
|                   "-f", |  | ||||||
|                   archivePath.replace(/\\/g, "/"), |  | ||||||
|                   "-C", |  | ||||||
|                   cachePath.replace(/\\/g, "/"), |  | ||||||
|                   "." |  | ||||||
|               ] |  | ||||||
|             : ["-cz", "-f", archivePath, "-C", cachePath, "."]; |  | ||||||
|  |  | ||||||
|         const tarPath = await io.which("tar", true); |         const fileSizeLimit = 400 * 1024 * 1024; // 400MB | ||||||
|         core.debug(`Tar Path: ${tarPath}`); |  | ||||||
|         await exec(`"${tarPath}"`, args); |  | ||||||
|  |  | ||||||
|         const fileSizeLimit = 2 * 1024 * 1024 * 1024; // 2GB per repo limit |  | ||||||
|         const archiveFileSize = utils.getArchiveFileSize(archivePath); |         const archiveFileSize = utils.getArchiveFileSize(archivePath); | ||||||
|         core.debug(`File Size: ${archiveFileSize}`); |         core.debug(`File Size: ${archiveFileSize}`); | ||||||
|         if (archiveFileSize > fileSizeLimit) { |         if (archiveFileSize > fileSizeLimit) { | ||||||
|             utils.logWarning( |             utils.logWarning( | ||||||
|                 `Cache size of ~${Math.round( |                 `Cache size of ~${Math.round( | ||||||
|                     archiveFileSize / (1024 * 1024 * 1024) |                     archiveFileSize / (1024 * 1024) | ||||||
|                 )} GB (${archiveFileSize} B) is over the 2GB limit, not saving cache.` |                 )} MB (${archiveFileSize} B) is over the 400MB limit, not saving cache.` | ||||||
|             ); |             ); | ||||||
|             return; |             return; | ||||||
|         } |         } | ||||||
|  |  | ||||||
|         core.debug("Saving Cache"); |         await cacheHttpClient.saveCache(primaryKey, archivePath); | ||||||
|         await cacheHttpClient.saveCache(cacheId, archivePath); |  | ||||||
|     } catch (error) { |     } catch (error) { | ||||||
|         utils.logWarning(error.message); |         utils.logWarning(error.message); | ||||||
|     } |     } | ||||||
|  | |||||||
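The save-side limit drops from 2 GB to 400 MB in this commit, and the warning reports MB instead of GB. The arithmetic, spelled out against a hypothetical oversized archive:

    const fileSizeLimit = 400 * 1024 * 1024; // 419,430,400 bytes

    // e.g. a 450 MB tarball (hypothetical size) trips the guard:
    const archiveFileSize = 450 * 1024 * 1024; // 471,859,200 bytes
    if (archiveFileSize > fileSizeLimit) {
        // Math.round(471859200 / 1048576) === 450
        console.log(
            `Cache size of ~${Math.round(
                archiveFileSize / (1024 * 1024)
            )} MB (${archiveFileSize} B) is over the 400MB limit, not saving cache.`
        );
    }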
33 src/tar.ts (new file)
| @ -0,0 +1,33 @@ | |||||||
|  | import { exec } from "@actions/exec"; | ||||||
|  | import * as io from "@actions/io"; | ||||||
|  |  | ||||||
|  | async function getTarPath(): Promise<string> { | ||||||
|  |     // Explicitly use BSD Tar on Windows | ||||||
|  |     const IS_WINDOWS = process.platform === "win32"; | ||||||
|  |     return IS_WINDOWS | ||||||
|  |         ? `${process.env["windir"]}\\System32\\tar.exe` | ||||||
|  |         : await io.which("tar", true); | ||||||
|  | } | ||||||
|  |  | ||||||
|  | export async function extractTar( | ||||||
|  |     archivePath: string, | ||||||
|  |     targetDirectory: string | ||||||
|  | ): Promise<void> { | ||||||
|  |     // Create directory to extract tar into | ||||||
|  |     await io.mkdirP(targetDirectory); | ||||||
|  |  | ||||||
|  |     // http://man7.org/linux/man-pages/man1/tar.1.html | ||||||
|  |     // tar [-options] <name of the tar archive> [files or directories to add to the archive] | ||||||
|  |     const args = ["-xz", "-f", archivePath, "-C", targetDirectory]; | ||||||
|  |     await exec(`"${await getTarPath()}"`, args); | ||||||
|  | } | ||||||
|  |  | ||||||
|  | export async function createTar( | ||||||
|  |     archivePath: string, | ||||||
|  |     sourceDirectory: string | ||||||
|  | ): Promise<void> { | ||||||
|  |     // http://man7.org/linux/man-pages/man1/tar.1.html | ||||||
|  |     // tar [-options] <name of the tar archive> [files or directories to add to the archive] | ||||||
|  |     const args = ["-cz", "-f", archivePath, "-C", sourceDirectory, "."]; | ||||||
|  |     await exec(`"${await getTarPath()}"`, args); | ||||||
|  | } | ||||||
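The new module also pins down the platform split: on Windows it hardcodes the system BSD tar at `%windir%\System32\tar.exe` (which is why the old GNU-tar `--force-local` and backslash-rewriting arguments disappear from save.ts and restore.ts), while elsewhere it resolves `tar` from the PATH. A small round-trip sketch with illustrative paths:

    import * as path from "path";
    import { createTar, extractTar } from "./tar";

    async function roundTrip(tempDir: string, cacheDir: string): Promise<void> {
        const archivePath = path.join(tempDir, "cache.tgz");

        // Runs: tar -cz -f <archivePath> -C <cacheDir> .
        await createTar(archivePath, cacheDir);

        // Creates cacheDir if needed, then: tar -xz -f <archivePath> -C <cacheDir>
        await extractTar(archivePath, cacheDir);
    }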