Mirror of https://github.com/actions/cache.git (synced 2025-10-31 23:36:22 +07:00)

Compare commits

7 commits, all authored by joshmgross:

- 7527073910
- a631fadf14
- e223b0a12d
- decbafc350
- 3854a40aee
- 0188dffc5a
- 002d3a77f4

README.md
| @ -56,7 +56,7 @@ jobs: | ||||
|  | ||||
| ## Implementation Examples | ||||
|  | ||||
| Every programming language and framework has it's own way of caching.  | ||||
| Every programming language and framework has its own way of caching. | ||||
|  | ||||
| See [Examples](examples.md) for a list of `actions/cache` implementations for use with: | ||||
|  | ||||
|  | ||||
__tests__/restore.test.ts
| @ -1,18 +1,16 @@ | ||||
| import * as core from "@actions/core"; | ||||
| import * as exec from "@actions/exec"; | ||||
| import * as io from "@actions/io"; | ||||
| import * as path from "path"; | ||||
| import * as cacheHttpClient from "../src/cacheHttpClient"; | ||||
| import { Events, Inputs } from "../src/constants"; | ||||
| import { ArtifactCacheEntry } from "../src/contracts"; | ||||
| import run from "../src/restore"; | ||||
| import * as tar from "../src/tar"; | ||||
| import * as actionUtils from "../src/utils/actionUtils"; | ||||
| import * as testUtils from "../src/utils/testUtils"; | ||||
|  | ||||
| jest.mock("@actions/exec"); | ||||
| jest.mock("@actions/io"); | ||||
| jest.mock("../src/utils/actionUtils"); | ||||
| jest.mock("../src/cacheHttpClient"); | ||||
| jest.mock("../src/tar"); | ||||
| jest.mock("../src/utils/actionUtils"); | ||||
|  | ||||
| beforeAll(() => { | ||||
|     jest.spyOn(actionUtils, "resolvePath").mockImplementation(filePath => { | ||||
| @ -35,10 +33,6 @@ beforeAll(() => { | ||||
|         const actualUtils = jest.requireActual("../src/utils/actionUtils"); | ||||
|         return actualUtils.getSupportedEvents(); | ||||
|     }); | ||||
|  | ||||
|     jest.spyOn(io, "which").mockImplementation(tool => { | ||||
|         return Promise.resolve(tool); | ||||
|     }); | ||||
| }); | ||||
|  | ||||
| beforeEach(() => { | ||||
| @ -245,8 +239,7 @@ test("restore with cache found", async () => { | ||||
|         .spyOn(actionUtils, "getArchiveFileSize") | ||||
|         .mockReturnValue(fileSize); | ||||
|  | ||||
|     const mkdirMock = jest.spyOn(io, "mkdirP"); | ||||
|     const execMock = jest.spyOn(exec, "exec"); | ||||
|     const extractTarMock = jest.spyOn(tar, "extractTar"); | ||||
|     const setCacheHitOutputMock = jest.spyOn(actionUtils, "setCacheHitOutput"); | ||||
|  | ||||
|     await run(); | ||||
| @ -255,27 +248,11 @@ test("restore with cache found", async () => { | ||||
|     expect(getCacheMock).toHaveBeenCalledWith([key]); | ||||
|     expect(setCacheStateMock).toHaveBeenCalledWith(cacheEntry); | ||||
|     expect(createTempDirectoryMock).toHaveBeenCalledTimes(1); | ||||
|     expect(downloadCacheMock).toHaveBeenCalledWith( | ||||
|         cacheEntry.archiveLocation, | ||||
|         archivePath | ||||
|     ); | ||||
|     expect(downloadCacheMock).toHaveBeenCalledWith(cacheEntry, archivePath); | ||||
|     expect(getArchiveFileSizeMock).toHaveBeenCalledWith(archivePath); | ||||
|     expect(mkdirMock).toHaveBeenCalledWith(cachePath); | ||||
|  | ||||
|     const IS_WINDOWS = process.platform === "win32"; | ||||
|     const args = IS_WINDOWS | ||||
|         ? [ | ||||
|               "-xz", | ||||
|               "--force-local", | ||||
|               "-f", | ||||
|               archivePath.replace(/\\/g, "/"), | ||||
|               "-C", | ||||
|               cachePath.replace(/\\/g, "/") | ||||
|           ] | ||||
|         : ["-xz", "-f", archivePath, "-C", cachePath]; | ||||
|  | ||||
|     expect(execMock).toHaveBeenCalledTimes(1); | ||||
|     expect(execMock).toHaveBeenCalledWith(`"tar"`, args); | ||||
|     expect(extractTarMock).toHaveBeenCalledTimes(1); | ||||
|     expect(extractTarMock).toHaveBeenCalledWith(archivePath, cachePath); | ||||
|  | ||||
|     expect(setCacheHitOutputMock).toHaveBeenCalledTimes(1); | ||||
|     expect(setCacheHitOutputMock).toHaveBeenCalledWith(true); | ||||
| @ -326,8 +303,7 @@ test("restore with a pull request event and cache found", async () => { | ||||
|         .spyOn(actionUtils, "getArchiveFileSize") | ||||
|         .mockReturnValue(fileSize); | ||||
|  | ||||
|     const mkdirMock = jest.spyOn(io, "mkdirP"); | ||||
|     const execMock = jest.spyOn(exec, "exec"); | ||||
|     const extractTarMock = jest.spyOn(tar, "extractTar"); | ||||
|     const setCacheHitOutputMock = jest.spyOn(actionUtils, "setCacheHitOutput"); | ||||
|  | ||||
|     await run(); | ||||
| @ -336,28 +312,12 @@ test("restore with a pull request event and cache found", async () => { | ||||
|     expect(getCacheMock).toHaveBeenCalledWith([key]); | ||||
|     expect(setCacheStateMock).toHaveBeenCalledWith(cacheEntry); | ||||
|     expect(createTempDirectoryMock).toHaveBeenCalledTimes(1); | ||||
|     expect(downloadCacheMock).toHaveBeenCalledWith( | ||||
|         cacheEntry.archiveLocation, | ||||
|         archivePath | ||||
|     ); | ||||
|     expect(downloadCacheMock).toHaveBeenCalledWith(cacheEntry, archivePath); | ||||
|     expect(getArchiveFileSizeMock).toHaveBeenCalledWith(archivePath); | ||||
|     expect(infoMock).toHaveBeenCalledWith(`Cache Size: ~60 MB (62915000 B)`); | ||||
|     expect(mkdirMock).toHaveBeenCalledWith(cachePath); | ||||
|  | ||||
|     const IS_WINDOWS = process.platform === "win32"; | ||||
|     const args = IS_WINDOWS | ||||
|         ? [ | ||||
|               "-xz", | ||||
|               "--force-local", | ||||
|               "-f", | ||||
|               archivePath.replace(/\\/g, "/"), | ||||
|               "-C", | ||||
|               cachePath.replace(/\\/g, "/") | ||||
|           ] | ||||
|         : ["-xz", "-f", archivePath, "-C", cachePath]; | ||||
|  | ||||
|     expect(execMock).toHaveBeenCalledTimes(1); | ||||
|     expect(execMock).toHaveBeenCalledWith(`"tar"`, args); | ||||
|     expect(extractTarMock).toHaveBeenCalledTimes(1); | ||||
|     expect(extractTarMock).toHaveBeenCalledWith(archivePath, cachePath); | ||||
|  | ||||
|     expect(setCacheHitOutputMock).toHaveBeenCalledTimes(1); | ||||
|     expect(setCacheHitOutputMock).toHaveBeenCalledWith(true); | ||||
| @ -408,8 +368,7 @@ test("restore with cache found for restore key", async () => { | ||||
|         .spyOn(actionUtils, "getArchiveFileSize") | ||||
|         .mockReturnValue(fileSize); | ||||
|  | ||||
|     const mkdirMock = jest.spyOn(io, "mkdirP"); | ||||
|     const execMock = jest.spyOn(exec, "exec"); | ||||
|     const extractTarMock = jest.spyOn(tar, "extractTar"); | ||||
|     const setCacheHitOutputMock = jest.spyOn(actionUtils, "setCacheHitOutput"); | ||||
|  | ||||
|     await run(); | ||||
| @ -418,28 +377,12 @@ test("restore with cache found for restore key", async () => { | ||||
|     expect(getCacheMock).toHaveBeenCalledWith([key, restoreKey]); | ||||
|     expect(setCacheStateMock).toHaveBeenCalledWith(cacheEntry); | ||||
|     expect(createTempDirectoryMock).toHaveBeenCalledTimes(1); | ||||
|     expect(downloadCacheMock).toHaveBeenCalledWith( | ||||
|         cacheEntry.archiveLocation, | ||||
|         archivePath | ||||
|     ); | ||||
|     expect(downloadCacheMock).toHaveBeenCalledWith(cacheEntry, archivePath); | ||||
|     expect(getArchiveFileSizeMock).toHaveBeenCalledWith(archivePath); | ||||
|     expect(infoMock).toHaveBeenCalledWith(`Cache Size: ~0 MB (142 B)`); | ||||
|     expect(mkdirMock).toHaveBeenCalledWith(cachePath); | ||||
|  | ||||
|     const IS_WINDOWS = process.platform === "win32"; | ||||
|     const args = IS_WINDOWS | ||||
|         ? [ | ||||
|               "-xz", | ||||
|               "--force-local", | ||||
|               "-f", | ||||
|               archivePath.replace(/\\/g, "/"), | ||||
|               "-C", | ||||
|               cachePath.replace(/\\/g, "/") | ||||
|           ] | ||||
|         : ["-xz", "-f", archivePath, "-C", cachePath]; | ||||
|  | ||||
|     expect(execMock).toHaveBeenCalledTimes(1); | ||||
|     expect(execMock).toHaveBeenCalledWith(`"tar"`, args); | ||||
|     expect(extractTarMock).toHaveBeenCalledTimes(1); | ||||
|     expect(extractTarMock).toHaveBeenCalledWith(archivePath, cachePath); | ||||
|  | ||||
|     expect(setCacheHitOutputMock).toHaveBeenCalledTimes(1); | ||||
|     expect(setCacheHitOutputMock).toHaveBeenCalledWith(false); | ||||
|  | ||||
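The restore tests above (and the save tests that follow) import an `ArtifactCacheEntry` contract that this compare does not display. As a point of reference, here is a minimal sketch of its shape, inferred only from the two fields the surrounding diffs actually read (`cacheKey` and `archiveLocation`); the file name and the optionality of the fields are assumptions:

```ts
// Hypothetical shape of src/contracts.d.ts. Only the fields observed in
// this compare are listed; their optionality is an assumption.
export interface ArtifactCacheEntry {
    cacheKey?: string;
    archiveLocation?: string;
}
```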
__tests__/save.test.ts
| @ -1,19 +1,17 @@ | ||||
| import * as core from "@actions/core"; | ||||
| import * as exec from "@actions/exec"; | ||||
| import * as io from "@actions/io"; | ||||
| import * as path from "path"; | ||||
| import * as cacheHttpClient from "../src/cacheHttpClient"; | ||||
| import { Events, Inputs } from "../src/constants"; | ||||
| import { ArtifactCacheEntry } from "../src/contracts"; | ||||
| import run from "../src/save"; | ||||
| import * as tar from "../src/tar"; | ||||
| import * as actionUtils from "../src/utils/actionUtils"; | ||||
| import * as testUtils from "../src/utils/testUtils"; | ||||
|  | ||||
| jest.mock("@actions/core"); | ||||
| jest.mock("@actions/exec"); | ||||
| jest.mock("@actions/io"); | ||||
| jest.mock("../src/utils/actionUtils"); | ||||
| jest.mock("../src/cacheHttpClient"); | ||||
| jest.mock("../src/tar"); | ||||
| jest.mock("../src/utils/actionUtils"); | ||||
|  | ||||
| beforeAll(() => { | ||||
|     jest.spyOn(core, "getInput").mockImplementation((name, options) => { | ||||
| @ -49,10 +47,6 @@ beforeAll(() => { | ||||
|     jest.spyOn(actionUtils, "createTempDirectory").mockImplementation(() => { | ||||
|         return Promise.resolve("/foo/bar"); | ||||
|     }); | ||||
|  | ||||
|     jest.spyOn(io, "which").mockImplementation(tool => { | ||||
|         return Promise.resolve(tool); | ||||
|     }); | ||||
| }); | ||||
|  | ||||
| beforeEach(() => { | ||||
| @ -128,7 +122,7 @@ test("save with exact match returns early", async () => { | ||||
|             return primaryKey; | ||||
|         }); | ||||
|  | ||||
|     const execMock = jest.spyOn(exec, "exec"); | ||||
|     const createTarMock = jest.spyOn(tar, "createTar"); | ||||
|  | ||||
|     await run(); | ||||
|  | ||||
| @ -136,7 +130,7 @@ test("save with exact match returns early", async () => { | ||||
|         `Cache hit occurred on the primary key ${primaryKey}, not saving cache.` | ||||
|     ); | ||||
|  | ||||
|     expect(execMock).toHaveBeenCalledTimes(0); | ||||
|     expect(createTarMock).toHaveBeenCalledTimes(0); | ||||
|  | ||||
|     expect(failedMock).toHaveBeenCalledTimes(0); | ||||
| }); | ||||
| @ -198,9 +192,9 @@ test("save with large cache outputs warning", async () => { | ||||
|     const cachePath = path.resolve(inputPath); | ||||
|     testUtils.setInput(Inputs.Path, inputPath); | ||||
|  | ||||
|     const execMock = jest.spyOn(exec, "exec"); | ||||
|     const createTarMock = jest.spyOn(tar, "createTar"); | ||||
|  | ||||
|     const cacheSize = 4 * 1024 * 1024 * 1024; //~4GB, over the 2GB limit | ||||
|     const cacheSize = 1024 * 1024 * 1024; //~1GB, over the 400MB limit | ||||
|     jest.spyOn(actionUtils, "getArchiveFileSize").mockImplementationOnce(() => { | ||||
|         return cacheSize; | ||||
|     }); | ||||
| @ -209,25 +203,12 @@ test("save with large cache outputs warning", async () => { | ||||
|  | ||||
|     const archivePath = path.join("/foo/bar", "cache.tgz"); | ||||
|  | ||||
|     const IS_WINDOWS = process.platform === "win32"; | ||||
|     const args = IS_WINDOWS | ||||
|         ? [ | ||||
|             "-cz", | ||||
|             "--force-local", | ||||
|             "-f", | ||||
|             archivePath.replace(/\\/g, "/"), | ||||
|             "-C", | ||||
|             cachePath.replace(/\\/g, "/"), | ||||
|             "." | ||||
|         ] | ||||
|         : ["-cz", "-f", archivePath, "-C", cachePath, "."]; | ||||
|  | ||||
|     expect(execMock).toHaveBeenCalledTimes(1); | ||||
|     expect(execMock).toHaveBeenCalledWith(`"tar"`, args); | ||||
|     expect(createTarMock).toHaveBeenCalledTimes(1); | ||||
|     expect(createTarMock).toHaveBeenCalledWith(archivePath, cachePath); | ||||
|  | ||||
|     expect(logWarningMock).toHaveBeenCalledTimes(1); | ||||
|     expect(logWarningMock).toHaveBeenCalledWith( | ||||
|         "Cache size of ~4 GB (4294967296 B) is over the 2GB limit, not saving cache." | ||||
|         "Cache size of ~1024 MB (1073741824 B) is over the 400MB limit, not saving cache." | ||||
|     ); | ||||
|  | ||||
|     expect(failedMock).toHaveBeenCalledTimes(0); | ||||
| @ -259,12 +240,7 @@ test("save with server error outputs warning", async () => { | ||||
|     const cachePath = path.resolve(inputPath); | ||||
|     testUtils.setInput(Inputs.Path, inputPath); | ||||
|  | ||||
|     const cacheId = 4; | ||||
|     const reserveCacheMock = jest.spyOn(cacheHttpClient, "reserveCache").mockImplementationOnce(() => { | ||||
|         return Promise.resolve(cacheId); | ||||
|     }); | ||||
|  | ||||
|     const execMock = jest.spyOn(exec, "exec"); | ||||
|     const createTarMock = jest.spyOn(tar, "createTar"); | ||||
|  | ||||
|     const saveCacheMock = jest | ||||
|         .spyOn(cacheHttpClient, "saveCache") | ||||
| @ -274,29 +250,13 @@ test("save with server error outputs warning", async () => { | ||||
|  | ||||
|     await run(); | ||||
|  | ||||
|     expect(reserveCacheMock).toHaveBeenCalledTimes(1); | ||||
|     expect(reserveCacheMock).toHaveBeenCalledWith(primaryKey); | ||||
|  | ||||
|     const archivePath = path.join("/foo/bar", "cache.tgz"); | ||||
|  | ||||
|     const IS_WINDOWS = process.platform === "win32"; | ||||
|     const args = IS_WINDOWS | ||||
|         ? [ | ||||
|             "-cz", | ||||
|             "--force-local", | ||||
|             "-f", | ||||
|             archivePath.replace(/\\/g, "/"), | ||||
|             "-C", | ||||
|             cachePath.replace(/\\/g, "/"), | ||||
|             "." | ||||
|         ] | ||||
|         : ["-cz", "-f", archivePath, "-C", cachePath, "."]; | ||||
|  | ||||
|     expect(execMock).toHaveBeenCalledTimes(1); | ||||
|     expect(execMock).toHaveBeenCalledWith(`"tar"`, args); | ||||
|     expect(createTarMock).toHaveBeenCalledTimes(1); | ||||
|     expect(createTarMock).toHaveBeenCalledWith(archivePath, cachePath); | ||||
|  | ||||
|     expect(saveCacheMock).toHaveBeenCalledTimes(1); | ||||
|     expect(saveCacheMock).toHaveBeenCalledWith(cacheId, archivePath); | ||||
|     expect(saveCacheMock).toHaveBeenCalledWith(primaryKey, archivePath); | ||||
|  | ||||
|     expect(logWarningMock).toHaveBeenCalledTimes(1); | ||||
|     expect(logWarningMock).toHaveBeenCalledWith("HTTP Error Occurred"); | ||||
| @ -329,40 +289,18 @@ test("save with valid inputs uploads a cache", async () => { | ||||
|     const cachePath = path.resolve(inputPath); | ||||
|     testUtils.setInput(Inputs.Path, inputPath); | ||||
|  | ||||
|     const cacheId = 4; | ||||
|     const reserveCacheMock = jest.spyOn(cacheHttpClient, "reserveCache").mockImplementationOnce(() => { | ||||
|         return Promise.resolve(cacheId); | ||||
|     }); | ||||
|  | ||||
|     const execMock = jest.spyOn(exec, "exec"); | ||||
|  | ||||
|     const createTarMock = jest.spyOn(tar, "createTar"); | ||||
|     const saveCacheMock = jest.spyOn(cacheHttpClient, "saveCache"); | ||||
|  | ||||
|     await run(); | ||||
|  | ||||
|     expect(reserveCacheMock).toHaveBeenCalledTimes(1); | ||||
|     expect(reserveCacheMock).toHaveBeenCalledWith(primaryKey); | ||||
|  | ||||
|     const archivePath = path.join("/foo/bar", "cache.tgz"); | ||||
|  | ||||
|     const IS_WINDOWS = process.platform === "win32"; | ||||
|     const args = IS_WINDOWS | ||||
|         ? [ | ||||
|             "-cz", | ||||
|             "--force-local", | ||||
|             "-f", | ||||
|             archivePath.replace(/\\/g, "/"), | ||||
|             "-C", | ||||
|             cachePath.replace(/\\/g, "/"), | ||||
|             "." | ||||
|         ] | ||||
|         : ["-cz", "-f", archivePath, "-C", cachePath, "."]; | ||||
|  | ||||
|     expect(execMock).toHaveBeenCalledTimes(1); | ||||
|     expect(execMock).toHaveBeenCalledWith(`"tar"`, args); | ||||
|     expect(createTarMock).toHaveBeenCalledTimes(1); | ||||
|     expect(createTarMock).toHaveBeenCalledWith(archivePath, cachePath); | ||||
|  | ||||
|     expect(saveCacheMock).toHaveBeenCalledTimes(1); | ||||
|     expect(saveCacheMock).toHaveBeenCalledWith(cacheId, archivePath); | ||||
|     expect(saveCacheMock).toHaveBeenCalledWith(primaryKey, archivePath); | ||||
|  | ||||
|     expect(failedMock).toHaveBeenCalledTimes(0); | ||||
| }); | ||||
|  | ||||
							
								
								
									
__tests__/tar.test.ts (new file, 58 lines)
							| @ -0,0 +1,58 @@ | ||||
| import * as exec from "@actions/exec"; | ||||
| import * as io from "@actions/io"; | ||||
| import * as tar from "../src/tar"; | ||||
|  | ||||
| jest.mock("@actions/exec"); | ||||
| jest.mock("@actions/io"); | ||||
|  | ||||
| beforeAll(() => { | ||||
|     jest.spyOn(io, "which").mockImplementation(tool => { | ||||
|         return Promise.resolve(tool); | ||||
|     }); | ||||
| }); | ||||
|  | ||||
| test("extract tar", async () => { | ||||
|     const mkdirMock = jest.spyOn(io, "mkdirP"); | ||||
|     const execMock = jest.spyOn(exec, "exec"); | ||||
|  | ||||
|     const archivePath = "cache.tar"; | ||||
|     const targetDirectory = "~/.npm/cache"; | ||||
|     await tar.extractTar(archivePath, targetDirectory); | ||||
|  | ||||
|     expect(mkdirMock).toHaveBeenCalledWith(targetDirectory); | ||||
|  | ||||
|     const IS_WINDOWS = process.platform === "win32"; | ||||
|     const tarPath = IS_WINDOWS | ||||
|         ? `${process.env["windir"]}\\System32\\tar.exe` | ||||
|         : "tar"; | ||||
|     expect(execMock).toHaveBeenCalledTimes(1); | ||||
|     expect(execMock).toHaveBeenCalledWith(`"${tarPath}"`, [ | ||||
|         "-xz", | ||||
|         "-f", | ||||
|         archivePath, | ||||
|         "-C", | ||||
|         targetDirectory | ||||
|     ]); | ||||
| }); | ||||
|  | ||||
| test("create tar", async () => { | ||||
|     const execMock = jest.spyOn(exec, "exec"); | ||||
|  | ||||
|     const archivePath = "cache.tar"; | ||||
|     const sourceDirectory = "~/.npm/cache"; | ||||
|     await tar.createTar(archivePath, sourceDirectory); | ||||
|  | ||||
|     const IS_WINDOWS = process.platform === "win32"; | ||||
|     const tarPath = IS_WINDOWS | ||||
|         ? `${process.env["windir"]}\\System32\\tar.exe` | ||||
|         : "tar"; | ||||
|     expect(execMock).toHaveBeenCalledTimes(1); | ||||
|     expect(execMock).toHaveBeenCalledWith(`"${tarPath}"`, [ | ||||
|         "-cz", | ||||
|         "-f", | ||||
|         archivePath, | ||||
|         "-C", | ||||
|         sourceDirectory, | ||||
|         "." | ||||
|     ]); | ||||
| }); | ||||
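The `src/tar.ts` module these tests exercise is not itself shown in this compare. Below is a hedged reconstruction of its shape from the test expectations above and the compiled `dist` output further down; note that the shipped `dist` build additionally prefixes the command with `sudo` on non-Windows, a branch these tests do not cover.

```ts
// Reconstruction of src/tar.ts from the tests above and the compiled dist
// output below; not the verbatim source file.
import { exec } from "@actions/exec";
import * as io from "@actions/io";
import { existsSync } from "fs";

async function getTarPath(): Promise<string> {
    // Explicitly use BSD tar on Windows, which ships in System32 on
    // recent runner images; otherwise resolve tar from the PATH.
    if (process.platform === "win32") {
        const systemTar = `${process.env["windir"]}\\System32\\tar.exe`;
        if (existsSync(systemTar)) {
            return systemTar;
        }
    }
    return io.which("tar", true);
}

export async function extractTar(
    archivePath: string,
    targetDirectory: string
): Promise<void> {
    // Create the directory to extract into before invoking tar -x.
    await io.mkdirP(targetDirectory);
    const tarPath = await getTarPath();
    await exec(`"${tarPath}"`, [
        "-xz",
        "-f",
        archivePath,
        "-C",
        targetDirectory
    ]);
}

export async function createTar(
    archivePath: string,
    sourceDirectory: string
): Promise<void> {
    // The trailing "." archives the contents of sourceDirectory rather
    // than the directory itself.
    const tarPath = await getTarPath();
    await exec(`"${tarPath}"`, [
        "-cz",
        "-f",
        archivePath,
        "-C",
        sourceDirectory,
        "."
    ]);
}
```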
							
								
								
									
dist/restore/index.js (vendored, 241 lines changed)
							| @ -1496,55 +1496,47 @@ const fs = __importStar(__webpack_require__(747)); | ||||
| const Handlers_1 = __webpack_require__(941); | ||||
| const HttpClient_1 = __webpack_require__(874); | ||||
| const RestClient_1 = __webpack_require__(105); | ||||
| const utils = __importStar(__webpack_require__(443)); | ||||
| function isSuccessStatusCode(statusCode) { | ||||
|     return statusCode >= 200 && statusCode < 300; | ||||
| } | ||||
| function getCacheApiUrl() { | ||||
| function getCacheUrl() { | ||||
|     // Ideally we just use ACTIONS_CACHE_URL | ||||
|     const baseUrl = (process.env["ACTIONS_CACHE_URL"] || | ||||
|     const cacheUrl = (process.env["ACTIONS_CACHE_URL"] || | ||||
|         process.env["ACTIONS_RUNTIME_URL"] || | ||||
|         "").replace("pipelines", "artifactcache"); | ||||
|     if (!baseUrl) { | ||||
|     if (!cacheUrl) { | ||||
|         throw new Error("Cache Service Url not found, unable to restore cache."); | ||||
|     } | ||||
|     core.debug(`Cache Url: ${baseUrl}`); | ||||
|     return `${baseUrl}_apis/artifactcache/`; | ||||
|     core.debug(`Cache Url: ${cacheUrl}`); | ||||
|     return cacheUrl; | ||||
| } | ||||
| function createAcceptHeader(type, apiVersion) { | ||||
|     return `${type};api-version=${apiVersion}`; | ||||
| } | ||||
| function getRequestOptions() { | ||||
|     const requestOptions = { | ||||
|         acceptHeader: createAcceptHeader("application/json", "6.0-preview.1") | ||||
|         acceptHeader: createAcceptHeader("application/json", "5.2-preview.1") | ||||
|     }; | ||||
|     return requestOptions; | ||||
| } | ||||
| function createRestClient() { | ||||
| function getCacheEntry(keys) { | ||||
|     return __awaiter(this, void 0, void 0, function* () { | ||||
|         const cacheUrl = getCacheUrl(); | ||||
|         const token = process.env["ACTIONS_RUNTIME_TOKEN"] || ""; | ||||
|         const bearerCredentialHandler = new Handlers_1.BearerCredentialHandler(token); | ||||
|     return new RestClient_1.RestClient("actions/cache", getCacheApiUrl(), [ | ||||
|         const resource = `_apis/artifactcache/cache?keys=${encodeURIComponent(keys.join(","))}`; | ||||
|         const restClient = new RestClient_1.RestClient("actions/cache", cacheUrl, [ | ||||
|             bearerCredentialHandler | ||||
|         ]); | ||||
| } | ||||
| function getCacheEntry(keys) { | ||||
|     var _a; | ||||
|     return __awaiter(this, void 0, void 0, function* () { | ||||
|         const restClient = createRestClient(); | ||||
|         const resource = `cache?keys=${encodeURIComponent(keys.join(","))}`; | ||||
|         const response = yield restClient.get(resource, getRequestOptions()); | ||||
|         if (response.statusCode === 204) { | ||||
|             return null; | ||||
|         } | ||||
|         if (!isSuccessStatusCode(response.statusCode)) { | ||||
|         if (response.statusCode !== 200) { | ||||
|             throw new Error(`Cache service responded with ${response.statusCode}`); | ||||
|         } | ||||
|         const cacheResult = response.result; | ||||
|         const cacheDownloadUrl = (_a = cacheResult) === null || _a === void 0 ? void 0 : _a.archiveLocation; | ||||
|         if (!cacheDownloadUrl) { | ||||
|         if (!cacheResult || !cacheResult.archiveLocation) { | ||||
|             throw new Error("Cache not found."); | ||||
|         } | ||||
|         core.setSecret(cacheDownloadUrl); | ||||
|         core.setSecret(cacheResult.archiveLocation); | ||||
|         core.debug(`Cache Result:`); | ||||
|         core.debug(JSON.stringify(cacheResult)); | ||||
|         return cacheResult; | ||||
| @ -1560,102 +1552,34 @@ function pipeResponseToStream(response, stream) { | ||||
|         }); | ||||
|     }); | ||||
| } | ||||
| function downloadCache(archiveLocation, archivePath) { | ||||
| function downloadCache(cacheEntry, archivePath) { | ||||
|     return __awaiter(this, void 0, void 0, function* () { | ||||
|         const stream = fs.createWriteStream(archivePath); | ||||
|         const httpClient = new HttpClient_1.HttpClient("actions/cache"); | ||||
|         const downloadResponse = yield httpClient.get(archiveLocation); | ||||
|         // eslint-disable-next-line @typescript-eslint/no-non-null-assertion | ||||
|         const downloadResponse = yield httpClient.get(cacheEntry.archiveLocation); | ||||
|         yield pipeResponseToStream(downloadResponse, stream); | ||||
|     }); | ||||
| } | ||||
| exports.downloadCache = downloadCache; | ||||
| // Reserve Cache | ||||
| function reserveCache(key) { | ||||
|     var _a, _b, _c; | ||||
| function saveCache(key, archivePath) { | ||||
|     return __awaiter(this, void 0, void 0, function* () { | ||||
|         const restClient = createRestClient(); | ||||
|         const reserveCacheRequest = { | ||||
|             key | ||||
|         }; | ||||
|         const response = yield restClient.create("caches", reserveCacheRequest, getRequestOptions()); | ||||
|         return _c = (_b = (_a = response) === null || _a === void 0 ? void 0 : _a.result) === null || _b === void 0 ? void 0 : _b.cacheId, (_c !== null && _c !== void 0 ? _c : -1); | ||||
|     }); | ||||
| } | ||||
| exports.reserveCache = reserveCache; | ||||
| function getContentRange(start, end) { | ||||
|     // Format: `bytes start-end/filesize | ||||
|     // start and end are inclusive | ||||
|     // filesize can be * | ||||
|     // For a 200 byte chunk starting at byte 0: | ||||
|     // Content-Range: bytes 0-199/* | ||||
|     return `bytes ${start}-${end}/*`; | ||||
| } | ||||
| // function bufferToStream(buffer: Buffer): NodeJS.ReadableStream { | ||||
| //     const stream = new Duplex(); | ||||
| //     stream.push(buffer); | ||||
| //     stream.push(null); | ||||
| //     return stream; | ||||
| // } | ||||
| function uploadChunk(restClient, resourceUrl, data, start, end) { | ||||
|     return __awaiter(this, void 0, void 0, function* () { | ||||
|         core.debug(`Uploading chunk of size ${end - start + 1} bytes at offset ${start} with content range: ${getContentRange(start, end)}`); | ||||
|         const stream = fs.createReadStream(archivePath); | ||||
|         const cacheUrl = getCacheUrl(); | ||||
|         const token = process.env["ACTIONS_RUNTIME_TOKEN"] || ""; | ||||
|         const bearerCredentialHandler = new Handlers_1.BearerCredentialHandler(token); | ||||
|         const resource = `_apis/artifactcache/cache/${encodeURIComponent(key)}`; | ||||
|         const postUrl = cacheUrl + resource; | ||||
|         const restClient = new RestClient_1.RestClient("actions/cache", undefined, [ | ||||
|             bearerCredentialHandler | ||||
|         ]); | ||||
|         const requestOptions = getRequestOptions(); | ||||
|         requestOptions.additionalHeaders = { | ||||
|             "Content-Type": "application/octet-stream", | ||||
|             "Content-Range": getContentRange(start, end) | ||||
|             "Content-Type": "application/octet-stream" | ||||
|         }; | ||||
|         return yield restClient.uploadStream("PATCH", resourceUrl, data, requestOptions); | ||||
|     }); | ||||
| } | ||||
| function commitCache(restClient, cacheId, filesize) { | ||||
|     return __awaiter(this, void 0, void 0, function* () { | ||||
|         const requestOptions = getRequestOptions(); | ||||
|         const commitCacheRequest = { size: filesize }; | ||||
|         return yield restClient.create(`caches/${cacheId.toString()}`, commitCacheRequest, requestOptions); | ||||
|     }); | ||||
| } | ||||
| function uploadFile(restClient, cacheId, archivePath) { | ||||
|     return __awaiter(this, void 0, void 0, function* () { | ||||
|         // Upload Chunks | ||||
|         const fileSize = fs.statSync(archivePath).size; | ||||
|         const resourceUrl = getCacheApiUrl() + "caches/" + cacheId.toString(); | ||||
|         const responses = []; | ||||
|         const fd = fs.openSync(archivePath, "r"); | ||||
|         const concurrency = 16; // # of HTTP requests in parallel | ||||
|         const MAX_CHUNK_SIZE = 32000000; // 32 MB Chunks | ||||
|         core.debug(`Concurrency: ${concurrency} and Chunk Size: ${MAX_CHUNK_SIZE}`); | ||||
|         const parallelUploads = [...new Array(concurrency).keys()]; | ||||
|         core.debug("Awaiting all uploads"); | ||||
|         let offset = 0; | ||||
|         yield Promise.all(parallelUploads.map(() => __awaiter(this, void 0, void 0, function* () { | ||||
|             while (offset < fileSize) { | ||||
|                 const chunkSize = offset + MAX_CHUNK_SIZE > fileSize ? fileSize - offset : MAX_CHUNK_SIZE; | ||||
|                 const start = offset; | ||||
|                 const end = offset + chunkSize - 1; | ||||
|                 offset += MAX_CHUNK_SIZE; | ||||
|                 const chunk = fs.createReadStream(archivePath, { fd, start, end, autoClose: false }); | ||||
|                 responses.push(yield uploadChunk(restClient, resourceUrl, chunk, start, end)); | ||||
|             } | ||||
|         }))); | ||||
|         fs.closeSync(fd); | ||||
|         const failedResponse = responses.find(x => !isSuccessStatusCode(x.statusCode)); | ||||
|         if (failedResponse) { | ||||
|             throw new Error(`Cache service responded with ${failedResponse.statusCode} during chunk upload.`); | ||||
|         } | ||||
|         return; | ||||
|     }); | ||||
| } | ||||
| function saveCache(cacheId, archivePath) { | ||||
|     return __awaiter(this, void 0, void 0, function* () { | ||||
|         const restClient = createRestClient(); | ||||
|         core.debug("Upload cache"); | ||||
|         yield uploadFile(restClient, cacheId, archivePath); | ||||
|         core.debug("Commiting cache"); | ||||
|         // Commit Cache | ||||
|         const cacheSize = utils.getArchiveFileSize(archivePath); | ||||
|         const commitCacheResponse = yield commitCache(restClient, cacheId, cacheSize); | ||||
|         if (!isSuccessStatusCode(commitCacheResponse.statusCode)) { | ||||
|             throw new Error(`Cache service responded with ${commitCacheResponse.statusCode} during commit cache.`); | ||||
|         const response = yield restClient.uploadStream("POST", postUrl, stream, requestOptions); | ||||
|         if (response.statusCode !== 200) { | ||||
|             throw new Error(`Cache service responded with ${response.statusCode}`); | ||||
|         } | ||||
|         core.info("Cache saved successfully"); | ||||
|     }); | ||||
| @ -3067,14 +2991,12 @@ var __importStar = (this && this.__importStar) || function (mod) { | ||||
| }; | ||||
| Object.defineProperty(exports, "__esModule", { value: true }); | ||||
| const core = __importStar(__webpack_require__(470)); | ||||
| const exec_1 = __webpack_require__(986); | ||||
| const io = __importStar(__webpack_require__(1)); | ||||
| const path = __importStar(__webpack_require__(622)); | ||||
| const cacheHttpClient = __importStar(__webpack_require__(154)); | ||||
| const constants_1 = __webpack_require__(694); | ||||
| const tar_1 = __webpack_require__(943); | ||||
| const utils = __importStar(__webpack_require__(443)); | ||||
| function run() { | ||||
|     var _a, _b; | ||||
|     return __awaiter(this, void 0, void 0, function* () { | ||||
|         try { | ||||
|             // Validate inputs, this can cause task failure | ||||
| @ -3112,7 +3034,7 @@ function run() { | ||||
|             } | ||||
|             try { | ||||
|                 const cacheEntry = yield cacheHttpClient.getCacheEntry(keys); | ||||
|                 if (!cacheEntry || !((_a = cacheEntry) === null || _a === void 0 ? void 0 : _a.archiveLocation)) { | ||||
|                 if (!cacheEntry) { | ||||
|                     core.info(`Cache not found for input keys: ${keys.join(", ")}.`); | ||||
|                     return; | ||||
|                 } | ||||
| @ -3121,28 +3043,10 @@ function run() { | ||||
|                 // Store the cache result | ||||
|                 utils.setCacheState(cacheEntry); | ||||
|                 // Download the cache from the cache entry | ||||
|                 yield cacheHttpClient.downloadCache((_b = cacheEntry) === null || _b === void 0 ? void 0 : _b.archiveLocation, archivePath); | ||||
|                 yield exec_1.exec(`md5sum`, [archivePath]); | ||||
|                 yield cacheHttpClient.downloadCache(cacheEntry, archivePath); | ||||
|                 const archiveFileSize = utils.getArchiveFileSize(archivePath); | ||||
|                 core.info(`Cache Size: ~${Math.round(archiveFileSize / (1024 * 1024))} MB (${archiveFileSize} B)`); | ||||
|                 // Create directory to extract tar into | ||||
|                 yield io.mkdirP(cachePath); | ||||
|                 // http://man7.org/linux/man-pages/man1/tar.1.html | ||||
|                 // tar [-options] <name of the tar archive> [files or directories which to add into archive] | ||||
|                 const IS_WINDOWS = process.platform === "win32"; | ||||
|                 const args = IS_WINDOWS | ||||
|                     ? [ | ||||
|                         "-xz", | ||||
|                         "--force-local", | ||||
|                         "-f", | ||||
|                         archivePath.replace(/\\/g, "/"), | ||||
|                         "-C", | ||||
|                         cachePath.replace(/\\/g, "/") | ||||
|                     ] | ||||
|                     : ["-xz", "-f", archivePath, "-C", cachePath]; | ||||
|                 const tarPath = yield io.which("tar", true); | ||||
|                 core.debug(`Tar Path: ${tarPath}`); | ||||
|                 yield exec_1.exec(`"${tarPath}"`, args); | ||||
|                 yield tar_1.extractTar(archivePath, cachePath); | ||||
|                 const isExactKeyMatch = utils.isExactKeyMatch(primaryKey, cacheEntry); | ||||
|                 utils.setCacheHitOutput(isExactKeyMatch); | ||||
|                 core.info(`Cache restored from key: ${cacheEntry && cacheEntry.cacheKey}`); | ||||
| @ -5238,6 +5142,81 @@ var personalaccesstoken_1 = __webpack_require__(327); | ||||
| exports.PersonalAccessTokenCredentialHandler = personalaccesstoken_1.PersonalAccessTokenCredentialHandler; | ||||
|  | ||||
|  | ||||
| /***/ }), | ||||
|  | ||||
| /***/ 943: | ||||
| /***/ (function(__unusedmodule, exports, __webpack_require__) { | ||||
|  | ||||
| "use strict"; | ||||
|  | ||||
| var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { | ||||
|     function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); } | ||||
|     return new (P || (P = Promise))(function (resolve, reject) { | ||||
|         function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } | ||||
|         function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } | ||||
|         function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } | ||||
|         step((generator = generator.apply(thisArg, _arguments || [])).next()); | ||||
|     }); | ||||
| }; | ||||
| var __importStar = (this && this.__importStar) || function (mod) { | ||||
|     if (mod && mod.__esModule) return mod; | ||||
|     var result = {}; | ||||
|     if (mod != null) for (var k in mod) if (Object.hasOwnProperty.call(mod, k)) result[k] = mod[k]; | ||||
|     result["default"] = mod; | ||||
|     return result; | ||||
| }; | ||||
| Object.defineProperty(exports, "__esModule", { value: true }); | ||||
| const exec_1 = __webpack_require__(986); | ||||
| const io = __importStar(__webpack_require__(1)); | ||||
| const fs_1 = __webpack_require__(747); | ||||
| function getTarPath() { | ||||
|     return __awaiter(this, void 0, void 0, function* () { | ||||
|         // Explicitly use BSD Tar on Windows | ||||
|         const IS_WINDOWS = process.platform === "win32"; | ||||
|         if (IS_WINDOWS) { | ||||
|             const systemTar = `${process.env["windir"]}\\System32\\tar.exe`; | ||||
|             if (fs_1.existsSync(systemTar)) { | ||||
|                 return systemTar; | ||||
|             } | ||||
|         } | ||||
|         return yield io.which("tar", true); | ||||
|     }); | ||||
| } | ||||
| function execTar(args) { | ||||
|     var _a, _b; | ||||
|     return __awaiter(this, void 0, void 0, function* () { | ||||
|         try { | ||||
|             const tarPath = yield getTarPath(); | ||||
|             const tarExec = process.platform !== "win32" ? `sudo ${tarPath}` : tarPath; | ||||
|             yield exec_1.exec(`"${tarExec}"`, args); | ||||
|         } | ||||
|         catch (error) { | ||||
|             const IS_WINDOWS = process.platform === "win32"; | ||||
|             if (IS_WINDOWS) { | ||||
|                 throw new Error(`Tar failed with error: ${(_a = error) === null || _a === void 0 ? void 0 : _a.message}. Ensure BSD tar is installed and on the PATH.`); | ||||
|             } | ||||
|             throw new Error(`Tar failed with error: ${(_b = error) === null || _b === void 0 ? void 0 : _b.message}`); | ||||
|         } | ||||
|     }); | ||||
| } | ||||
| function extractTar(archivePath, targetDirectory) { | ||||
|     return __awaiter(this, void 0, void 0, function* () { | ||||
|         // Create directory to extract tar into | ||||
|         yield io.mkdirP(targetDirectory); | ||||
|         const args = ["-xz", "-f", archivePath, "-C", targetDirectory]; | ||||
|         yield execTar(args); | ||||
|     }); | ||||
| } | ||||
| exports.extractTar = extractTar; | ||||
| function createTar(archivePath, sourceDirectory) { | ||||
|     return __awaiter(this, void 0, void 0, function* () { | ||||
|         const args = ["-cz", "-f", archivePath, "-C", sourceDirectory, "."]; | ||||
|         yield execTar(args); | ||||
|     }); | ||||
| } | ||||
| exports.createTar = createTar; | ||||
|  | ||||
|  | ||||
| /***/ }), | ||||
|  | ||||
| /***/ 986: | ||||
|  | ||||
							
								
								
									
dist/save/index.js (vendored, 249 lines changed)
							| @ -1496,55 +1496,47 @@ const fs = __importStar(__webpack_require__(747)); | ||||
| const Handlers_1 = __webpack_require__(941); | ||||
| const HttpClient_1 = __webpack_require__(874); | ||||
| const RestClient_1 = __webpack_require__(105); | ||||
| const utils = __importStar(__webpack_require__(443)); | ||||
| function isSuccessStatusCode(statusCode) { | ||||
|     return statusCode >= 200 && statusCode < 300; | ||||
| } | ||||
| function getCacheApiUrl() { | ||||
| function getCacheUrl() { | ||||
|     // Ideally we just use ACTIONS_CACHE_URL | ||||
|     const baseUrl = (process.env["ACTIONS_CACHE_URL"] || | ||||
|     const cacheUrl = (process.env["ACTIONS_CACHE_URL"] || | ||||
|         process.env["ACTIONS_RUNTIME_URL"] || | ||||
|         "").replace("pipelines", "artifactcache"); | ||||
|     if (!baseUrl) { | ||||
|     if (!cacheUrl) { | ||||
|         throw new Error("Cache Service Url not found, unable to restore cache."); | ||||
|     } | ||||
|     core.debug(`Cache Url: ${baseUrl}`); | ||||
|     return `${baseUrl}_apis/artifactcache/`; | ||||
|     core.debug(`Cache Url: ${cacheUrl}`); | ||||
|     return cacheUrl; | ||||
| } | ||||
| function createAcceptHeader(type, apiVersion) { | ||||
|     return `${type};api-version=${apiVersion}`; | ||||
| } | ||||
| function getRequestOptions() { | ||||
|     const requestOptions = { | ||||
|         acceptHeader: createAcceptHeader("application/json", "6.0-preview.1") | ||||
|         acceptHeader: createAcceptHeader("application/json", "5.2-preview.1") | ||||
|     }; | ||||
|     return requestOptions; | ||||
| } | ||||
| function createRestClient() { | ||||
| function getCacheEntry(keys) { | ||||
|     return __awaiter(this, void 0, void 0, function* () { | ||||
|         const cacheUrl = getCacheUrl(); | ||||
|         const token = process.env["ACTIONS_RUNTIME_TOKEN"] || ""; | ||||
|         const bearerCredentialHandler = new Handlers_1.BearerCredentialHandler(token); | ||||
|     return new RestClient_1.RestClient("actions/cache", getCacheApiUrl(), [ | ||||
|         const resource = `_apis/artifactcache/cache?keys=${encodeURIComponent(keys.join(","))}`; | ||||
|         const restClient = new RestClient_1.RestClient("actions/cache", cacheUrl, [ | ||||
|             bearerCredentialHandler | ||||
|         ]); | ||||
| } | ||||
| function getCacheEntry(keys) { | ||||
|     var _a; | ||||
|     return __awaiter(this, void 0, void 0, function* () { | ||||
|         const restClient = createRestClient(); | ||||
|         const resource = `cache?keys=${encodeURIComponent(keys.join(","))}`; | ||||
|         const response = yield restClient.get(resource, getRequestOptions()); | ||||
|         if (response.statusCode === 204) { | ||||
|             return null; | ||||
|         } | ||||
|         if (!isSuccessStatusCode(response.statusCode)) { | ||||
|         if (response.statusCode !== 200) { | ||||
|             throw new Error(`Cache service responded with ${response.statusCode}`); | ||||
|         } | ||||
|         const cacheResult = response.result; | ||||
|         const cacheDownloadUrl = (_a = cacheResult) === null || _a === void 0 ? void 0 : _a.archiveLocation; | ||||
|         if (!cacheDownloadUrl) { | ||||
|         if (!cacheResult || !cacheResult.archiveLocation) { | ||||
|             throw new Error("Cache not found."); | ||||
|         } | ||||
|         core.setSecret(cacheDownloadUrl); | ||||
|         core.setSecret(cacheResult.archiveLocation); | ||||
|         core.debug(`Cache Result:`); | ||||
|         core.debug(JSON.stringify(cacheResult)); | ||||
|         return cacheResult; | ||||
| @ -1560,102 +1552,34 @@ function pipeResponseToStream(response, stream) { | ||||
|         }); | ||||
|     }); | ||||
| } | ||||
| function downloadCache(archiveLocation, archivePath) { | ||||
| function downloadCache(cacheEntry, archivePath) { | ||||
|     return __awaiter(this, void 0, void 0, function* () { | ||||
|         const stream = fs.createWriteStream(archivePath); | ||||
|         const httpClient = new HttpClient_1.HttpClient("actions/cache"); | ||||
|         const downloadResponse = yield httpClient.get(archiveLocation); | ||||
|         // eslint-disable-next-line @typescript-eslint/no-non-null-assertion | ||||
|         const downloadResponse = yield httpClient.get(cacheEntry.archiveLocation); | ||||
|         yield pipeResponseToStream(downloadResponse, stream); | ||||
|     }); | ||||
| } | ||||
| exports.downloadCache = downloadCache; | ||||
| // Reserve Cache | ||||
| function reserveCache(key) { | ||||
|     var _a, _b, _c; | ||||
| function saveCache(key, archivePath) { | ||||
|     return __awaiter(this, void 0, void 0, function* () { | ||||
|         const restClient = createRestClient(); | ||||
|         const reserveCacheRequest = { | ||||
|             key | ||||
|         }; | ||||
|         const response = yield restClient.create("caches", reserveCacheRequest, getRequestOptions()); | ||||
|         return _c = (_b = (_a = response) === null || _a === void 0 ? void 0 : _a.result) === null || _b === void 0 ? void 0 : _b.cacheId, (_c !== null && _c !== void 0 ? _c : -1); | ||||
|     }); | ||||
| } | ||||
| exports.reserveCache = reserveCache; | ||||
| function getContentRange(start, end) { | ||||
|     // Format: `bytes start-end/filesize | ||||
|     // start and end are inclusive | ||||
|     // filesize can be * | ||||
|     // For a 200 byte chunk starting at byte 0: | ||||
|     // Content-Range: bytes 0-199/* | ||||
|     return `bytes ${start}-${end}/*`; | ||||
| } | ||||
| // function bufferToStream(buffer: Buffer): NodeJS.ReadableStream { | ||||
| //     const stream = new Duplex(); | ||||
| //     stream.push(buffer); | ||||
| //     stream.push(null); | ||||
| //     return stream; | ||||
| // } | ||||
| function uploadChunk(restClient, resourceUrl, data, start, end) { | ||||
|     return __awaiter(this, void 0, void 0, function* () { | ||||
|         core.debug(`Uploading chunk of size ${end - start + 1} bytes at offset ${start} with content range: ${getContentRange(start, end)}`); | ||||
|         const stream = fs.createReadStream(archivePath); | ||||
|         const cacheUrl = getCacheUrl(); | ||||
|         const token = process.env["ACTIONS_RUNTIME_TOKEN"] || ""; | ||||
|         const bearerCredentialHandler = new Handlers_1.BearerCredentialHandler(token); | ||||
|         const resource = `_apis/artifactcache/cache/${encodeURIComponent(key)}`; | ||||
|         const postUrl = cacheUrl + resource; | ||||
|         const restClient = new RestClient_1.RestClient("actions/cache", undefined, [ | ||||
|             bearerCredentialHandler | ||||
|         ]); | ||||
|         const requestOptions = getRequestOptions(); | ||||
|         requestOptions.additionalHeaders = { | ||||
|             "Content-Type": "application/octet-stream", | ||||
|             "Content-Range": getContentRange(start, end) | ||||
|             "Content-Type": "application/octet-stream" | ||||
|         }; | ||||
|         return yield restClient.uploadStream("PATCH", resourceUrl, data, requestOptions); | ||||
|     }); | ||||
| } | ||||
| function commitCache(restClient, cacheId, filesize) { | ||||
|     return __awaiter(this, void 0, void 0, function* () { | ||||
|         const requestOptions = getRequestOptions(); | ||||
|         const commitCacheRequest = { size: filesize }; | ||||
|         return yield restClient.create(`caches/${cacheId.toString()}`, commitCacheRequest, requestOptions); | ||||
|     }); | ||||
| } | ||||
| function uploadFile(restClient, cacheId, archivePath) { | ||||
|     return __awaiter(this, void 0, void 0, function* () { | ||||
|         // Upload Chunks | ||||
|         const fileSize = fs.statSync(archivePath).size; | ||||
|         const resourceUrl = getCacheApiUrl() + "caches/" + cacheId.toString(); | ||||
|         const responses = []; | ||||
|         const fd = fs.openSync(archivePath, "r"); | ||||
|         const concurrency = 16; // # of HTTP requests in parallel | ||||
|         const MAX_CHUNK_SIZE = 32000000; // 32 MB Chunks | ||||
|         core.debug(`Concurrency: ${concurrency} and Chunk Size: ${MAX_CHUNK_SIZE}`); | ||||
|         const parallelUploads = [...new Array(concurrency).keys()]; | ||||
|         core.debug("Awaiting all uploads"); | ||||
|         let offset = 0; | ||||
|         yield Promise.all(parallelUploads.map(() => __awaiter(this, void 0, void 0, function* () { | ||||
|             while (offset < fileSize) { | ||||
|                 const chunkSize = offset + MAX_CHUNK_SIZE > fileSize ? fileSize - offset : MAX_CHUNK_SIZE; | ||||
|                 const start = offset; | ||||
|                 const end = offset + chunkSize - 1; | ||||
|                 offset += MAX_CHUNK_SIZE; | ||||
|                 const chunk = fs.createReadStream(archivePath, { fd, start, end, autoClose: false }); | ||||
|                 responses.push(yield uploadChunk(restClient, resourceUrl, chunk, start, end)); | ||||
|             } | ||||
|         }))); | ||||
|         fs.closeSync(fd); | ||||
|         const failedResponse = responses.find(x => !isSuccessStatusCode(x.statusCode)); | ||||
|         if (failedResponse) { | ||||
|             throw new Error(`Cache service responded with ${failedResponse.statusCode} during chunk upload.`); | ||||
|         } | ||||
|         return; | ||||
|     }); | ||||
| } | ||||
| function saveCache(cacheId, archivePath) { | ||||
|     return __awaiter(this, void 0, void 0, function* () { | ||||
|         const restClient = createRestClient(); | ||||
|         core.debug("Upload cache"); | ||||
|         yield uploadFile(restClient, cacheId, archivePath); | ||||
|         core.debug("Commiting cache"); | ||||
|         // Commit Cache | ||||
|         const cacheSize = utils.getArchiveFileSize(archivePath); | ||||
|         const commitCacheResponse = yield commitCache(restClient, cacheId, cacheSize); | ||||
|         if (!isSuccessStatusCode(commitCacheResponse.statusCode)) { | ||||
|             throw new Error(`Cache service responded with ${commitCacheResponse.statusCode} during commit cache.`); | ||||
|         const response = yield restClient.uploadStream("POST", postUrl, stream, requestOptions); | ||||
|         if (response.statusCode !== 200) { | ||||
|             throw new Error(`Cache service responded with ${response.statusCode}`); | ||||
|         } | ||||
|         core.info("Cache saved successfully"); | ||||
|     }); | ||||
| @ -2955,11 +2879,10 @@ var __importStar = (this && this.__importStar) || function (mod) { | ||||
| }; | ||||
| Object.defineProperty(exports, "__esModule", { value: true }); | ||||
| const core = __importStar(__webpack_require__(470)); | ||||
| const exec_1 = __webpack_require__(986); | ||||
| const io = __importStar(__webpack_require__(1)); | ||||
| const path = __importStar(__webpack_require__(622)); | ||||
| const cacheHttpClient = __importStar(__webpack_require__(154)); | ||||
| const constants_1 = __webpack_require__(694); | ||||
| const tar_1 = __webpack_require__(943); | ||||
| const utils = __importStar(__webpack_require__(443)); | ||||
| function run() { | ||||
|     return __awaiter(this, void 0, void 0, function* () { | ||||
| @ -2981,44 +2904,19 @@ function run() { | ||||
|                 core.info(`Cache hit occurred on the primary key ${primaryKey}, not saving cache.`); | ||||
|                 return; | ||||
|             } | ||||
|             core.debug("Reserving Cache"); | ||||
|             const cacheId = yield cacheHttpClient.reserveCache(primaryKey); | ||||
|             if (cacheId < 0) { | ||||
|                 core.info(`Unable to reserve cache with key ${primaryKey}, another job may be creating this cache.`); | ||||
|                 return; | ||||
|             } | ||||
|             core.debug(`Cache ID: ${cacheId}`); | ||||
|             const cachePath = utils.resolvePath(core.getInput(constants_1.Inputs.Path, { required: true })); | ||||
|             core.debug(`Cache Path: ${cachePath}`); | ||||
|             const archivePath = path.join(yield utils.createTempDirectory(), "cache.tgz"); | ||||
|             core.debug(`Archive Path: ${archivePath}`); | ||||
|             // http://man7.org/linux/man-pages/man1/tar.1.html | ||||
|             // tar [-options] <name of the tar archive> [files or directories which to add into archive] | ||||
|             const IS_WINDOWS = process.platform === "win32"; | ||||
|             const args = IS_WINDOWS | ||||
|                 ? [ | ||||
|                     "-cz", | ||||
|                     "--force-local", | ||||
|                     "-f", | ||||
|                     archivePath.replace(/\\/g, "/"), | ||||
|                     "-C", | ||||
|                     cachePath.replace(/\\/g, "/"), | ||||
|                     "." | ||||
|                 ] | ||||
|                 : ["-cz", "-f", archivePath, "-C", cachePath, "."]; | ||||
|             const tarPath = yield io.which("tar", true); | ||||
|             core.debug(`Tar Path: ${tarPath}`); | ||||
|             yield exec_1.exec(`"${tarPath}"`, args); | ||||
|             const fileSizeLimit = 2 * 1024 * 1024 * 1024; // 2GB per repo limit | ||||
|             yield tar_1.createTar(archivePath, cachePath); | ||||
|             const fileSizeLimit = 400 * 1024 * 1024; // 400MB | ||||
|             const archiveFileSize = utils.getArchiveFileSize(archivePath); | ||||
|             core.debug(`File Size: ${archiveFileSize}`); | ||||
|             if (archiveFileSize > fileSizeLimit) { | ||||
|                 utils.logWarning(`Cache size of ~${Math.round(archiveFileSize / (1024 * 1024 * 1024))} GB (${archiveFileSize} B) is over the 2GB limit, not saving cache.`); | ||||
|                 utils.logWarning(`Cache size of ~${Math.round(archiveFileSize / (1024 * 1024))} MB (${archiveFileSize} B) is over the 400MB limit, not saving cache.`); | ||||
|                 return; | ||||
|             } | ||||
|             yield exec_1.exec(`md5sum`, [archivePath]); | ||||
|             core.debug("Saving Cache"); | ||||
|             yield cacheHttpClient.saveCache(cacheId, archivePath); | ||||
|             yield cacheHttpClient.saveCache(primaryKey, archivePath); | ||||
|         } | ||||
|         catch (error) { | ||||
|             utils.logWarning(error.message); | ||||
| @ -5218,6 +5116,81 @@ var personalaccesstoken_1 = __webpack_require__(327); | ||||
| exports.PersonalAccessTokenCredentialHandler = personalaccesstoken_1.PersonalAccessTokenCredentialHandler; | ||||
|  | ||||
|  | ||||
| /***/ }), | ||||
|  | ||||
| /***/ 943: | ||||
| /***/ (function(__unusedmodule, exports, __webpack_require__) { | ||||
|  | ||||
| "use strict"; | ||||
|  | ||||
| var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { | ||||
|     function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); } | ||||
|     return new (P || (P = Promise))(function (resolve, reject) { | ||||
|         function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } | ||||
|         function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } | ||||
|         function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } | ||||
|         step((generator = generator.apply(thisArg, _arguments || [])).next()); | ||||
|     }); | ||||
| }; | ||||
| var __importStar = (this && this.__importStar) || function (mod) { | ||||
|     if (mod && mod.__esModule) return mod; | ||||
|     var result = {}; | ||||
|     if (mod != null) for (var k in mod) if (Object.hasOwnProperty.call(mod, k)) result[k] = mod[k]; | ||||
|     result["default"] = mod; | ||||
|     return result; | ||||
| }; | ||||
| Object.defineProperty(exports, "__esModule", { value: true }); | ||||
| const exec_1 = __webpack_require__(986); | ||||
| const io = __importStar(__webpack_require__(1)); | ||||
| const fs_1 = __webpack_require__(747); | ||||
| function getTarPath() { | ||||
|     return __awaiter(this, void 0, void 0, function* () { | ||||
|         // Explicitly use BSD Tar on Windows | ||||
|         const IS_WINDOWS = process.platform === "win32"; | ||||
|         if (IS_WINDOWS) { | ||||
|             const systemTar = `${process.env["windir"]}\\System32\\tar.exe`; | ||||
|             if (fs_1.existsSync(systemTar)) { | ||||
|                 return systemTar; | ||||
|             } | ||||
|         } | ||||
|         return yield io.which("tar", true); | ||||
|     }); | ||||
| } | ||||
| function execTar(args) { | ||||
|     var _a, _b; | ||||
|     return __awaiter(this, void 0, void 0, function* () { | ||||
|         try { | ||||
|             const tarPath = yield getTarPath(); | ||||
|             const tarExec = process.platform !== "win32" ? `sudo ${tarPath}` : tarPath; | ||||
|             yield exec_1.exec(`"${tarExec}"`, args); | ||||
|         } | ||||
|         catch (error) { | ||||
|             const IS_WINDOWS = process.platform === "win32"; | ||||
|             if (IS_WINDOWS) { | ||||
|                 throw new Error(`Tar failed with error: ${(_a = error) === null || _a === void 0 ? void 0 : _a.message}. Ensure BSD tar is installed and on the PATH.`); | ||||
|             } | ||||
|             throw new Error(`Tar failed with error: ${(_b = error) === null || _b === void 0 ? void 0 : _b.message}`); | ||||
|         } | ||||
|     }); | ||||
| } | ||||
| function extractTar(archivePath, targetDirectory) { | ||||
|     return __awaiter(this, void 0, void 0, function* () { | ||||
|         // Create directory to extract tar into | ||||
|         yield io.mkdirP(targetDirectory); | ||||
|         const args = ["-xz", "-f", archivePath, "-C", targetDirectory]; | ||||
|         yield execTar(args); | ||||
|     }); | ||||
| } | ||||
| exports.extractTar = extractTar; | ||||
| function createTar(archivePath, sourceDirectory) { | ||||
|     return __awaiter(this, void 0, void 0, function* () { | ||||
|         const args = ["-cz", "-f", archivePath, "-C", sourceDirectory, "."]; | ||||
|         yield execTar(args); | ||||
|     }); | ||||
| } | ||||
| exports.createTar = createTar; | ||||
|  | ||||
|  | ||||
| /***/ }), | ||||
|  | ||||
| /***/ 986: | ||||
|  | ||||
							
								
								
									
examples.md (19 lines changed)
							| @ -1,6 +1,6 @@ | ||||
| # Examples | ||||
|  | ||||
| - [C# - Nuget](#c---nuget) | ||||
| - [C# - NuGet](#c---nuget) | ||||
| - [Elixir - Mix](#elixir---mix) | ||||
| - [Go - Modules](#go---modules) | ||||
| - [Java - Gradle](#java---gradle) | ||||
| @ -14,7 +14,7 @@ | ||||
| - [Swift, Objective-C - Carthage](#swift-objective-c---carthage) | ||||
| - [Swift, Objective-C - CocoaPods](#swift-objective-c---cocoapods) | ||||
|  | ||||
| ## C# - Nuget | ||||
| ## C# - NuGet | ||||
| Using [NuGet lock files](https://docs.microsoft.com/nuget/consume-packages/package-references-in-project-files#locking-dependencies): | ||||
|  | ||||
| ```yaml | ||||
| @ -26,6 +26,21 @@ Using [NuGet lock files](https://docs.microsoft.com/nuget/consume-packages/packa | ||||
|       ${{ runner.os }}-nuget- | ||||
| ``` | ||||
|  | ||||
| Depending on the environment, huge packages might be pre-installed in the global cache folder. | ||||
| If you do not want to include them, consider moving the cache folder as shown below. | ||||
| > Note: This workflow does not work for projects that require files to be placed in the user profile package folder. | ||||
| ```yaml | ||||
| env: | ||||
|   NUGET_PACKAGES: ${{ github.workspace }}/.nuget/packages | ||||
| steps: | ||||
|   - uses: actions/cache@v1 | ||||
|     with: | ||||
|       path: ${{ github.workspace }}/.nuget/packages | ||||
|       key: ${{ runner.os }}-nuget-${{ hashFiles('**/packages.lock.json') }} | ||||
|       restore-keys: | | ||||
|         ${{ runner.os }}-nuget- | ||||
| ``` | ||||
|  | ||||
| ## Elixir - Mix | ||||
| ```yaml | ||||
| - uses: actions/cache@v1 | ||||
|  | ||||
| @ -1,6 +1,6 @@ | ||||
| { | ||||
|   "name": "cache", | ||||
|   "version": "1.1.0", | ||||
|   "version": "1.0.3", | ||||
|   "private": true, | ||||
|   "description": "Cache dependencies and build outputs", | ||||
|   "main": "dist/restore/index.js", | ||||
|  | ||||
| @ -3,37 +3,24 @@ import * as fs from "fs"; | ||||
| import { BearerCredentialHandler } from "typed-rest-client/Handlers"; | ||||
| import { HttpClient } from "typed-rest-client/HttpClient"; | ||||
| import { IHttpClientResponse } from "typed-rest-client/Interfaces"; | ||||
| import { | ||||
|     IRequestOptions, | ||||
|     RestClient, | ||||
|     IRestResponse | ||||
| } from "typed-rest-client/RestClient"; | ||||
| import { | ||||
|     ArtifactCacheEntry, | ||||
|     CommitCacheRequest, | ||||
|     ReserveCacheRequest, | ||||
|     ReserverCacheResponse | ||||
| } from "./contracts"; | ||||
| import * as utils from "./utils/actionUtils"; | ||||
| import { IRequestOptions, RestClient } from "typed-rest-client/RestClient"; | ||||
| import { ArtifactCacheEntry } from "./contracts"; | ||||
|  | ||||
| function isSuccessStatusCode(statusCode: number): boolean { | ||||
|     return statusCode >= 200 && statusCode < 300; | ||||
| } | ||||
| function getCacheApiUrl(): string { | ||||
| function getCacheUrl(): string { | ||||
|     // Ideally we just use ACTIONS_CACHE_URL | ||||
|     const baseUrl: string = ( | ||||
|     const cacheUrl: string = ( | ||||
|         process.env["ACTIONS_CACHE_URL"] || | ||||
|         process.env["ACTIONS_RUNTIME_URL"] || | ||||
|         "" | ||||
|     ).replace("pipelines", "artifactcache"); | ||||
|     if (!baseUrl) { | ||||
|     if (!cacheUrl) { | ||||
|         throw new Error( | ||||
|             "Cache Service Url not found, unable to restore cache." | ||||
|         ); | ||||
|     } | ||||
|  | ||||
|     core.debug(`Cache Url: ${baseUrl}`); | ||||
|     return `${baseUrl}_apis/artifactcache/`; | ||||
|     core.debug(`Cache Url: ${cacheUrl}`); | ||||
|     return cacheUrl; | ||||
| } | ||||
|  | ||||
| function createAcceptHeader(type: string, apiVersion: string): string { | ||||
| @ -42,26 +29,26 @@ function createAcceptHeader(type: string, apiVersion: string): string { | ||||
|  | ||||
| function getRequestOptions(): IRequestOptions { | ||||
|     const requestOptions: IRequestOptions = { | ||||
|         acceptHeader: createAcceptHeader("application/json", "6.0-preview.1") | ||||
|         acceptHeader: createAcceptHeader("application/json", "5.2-preview.1") | ||||
|     }; | ||||
|  | ||||
|     return requestOptions; | ||||
| } | ||||
|  | ||||
| function createRestClient(): RestClient { | ||||
|     const token = process.env["ACTIONS_RUNTIME_TOKEN"] || ""; | ||||
|     const bearerCredentialHandler = new BearerCredentialHandler(token); | ||||
|  | ||||
|     return new RestClient("actions/cache", getCacheApiUrl(), [ | ||||
|         bearerCredentialHandler | ||||
|     ]); | ||||
| } | ||||
|  | ||||
| export async function getCacheEntry( | ||||
|     keys: string[] | ||||
| ): Promise<ArtifactCacheEntry | null> { | ||||
|     const restClient = createRestClient(); | ||||
|     const resource = `cache?keys=${encodeURIComponent(keys.join(","))}`; | ||||
|     const cacheUrl = getCacheUrl(); | ||||
|     const token = process.env["ACTIONS_RUNTIME_TOKEN"] || ""; | ||||
|     const bearerCredentialHandler = new BearerCredentialHandler(token); | ||||
|  | ||||
|     const resource = `_apis/artifactcache/cache?keys=${encodeURIComponent( | ||||
|         keys.join(",") | ||||
|     )}`; | ||||
|  | ||||
|     const restClient = new RestClient("actions/cache", cacheUrl, [ | ||||
|         bearerCredentialHandler | ||||
|     ]); | ||||
|  | ||||
|     const response = await restClient.get<ArtifactCacheEntry>( | ||||
|         resource, | ||||
| @ -70,15 +57,14 @@ export async function getCacheEntry( | ||||
|     if (response.statusCode === 204) { | ||||
|         return null; | ||||
|     } | ||||
|     if (!isSuccessStatusCode(response.statusCode)) { | ||||
|     if (response.statusCode !== 200) { | ||||
|         throw new Error(`Cache service responded with ${response.statusCode}`); | ||||
|     } | ||||
|     const cacheResult = response.result; | ||||
|     const cacheDownloadUrl = cacheResult?.archiveLocation; | ||||
|     if (!cacheDownloadUrl) { | ||||
|     if (!cacheResult || !cacheResult.archiveLocation) { | ||||
|         throw new Error("Cache not found."); | ||||
|     } | ||||
|     core.setSecret(cacheDownloadUrl); | ||||
|     core.setSecret(cacheResult.archiveLocation); | ||||
|     core.debug(`Cache Result:`); | ||||
|     core.debug(JSON.stringify(cacheResult)); | ||||
|  | ||||
| @ -97,165 +83,46 @@ async function pipeResponseToStream( | ||||
| } | ||||
|  | ||||
| export async function downloadCache( | ||||
|     archiveLocation: string, | ||||
|     cacheEntry: ArtifactCacheEntry, | ||||
|     archivePath: string | ||||
| ): Promise<void> { | ||||
|     const stream = fs.createWriteStream(archivePath); | ||||
|     const httpClient = new HttpClient("actions/cache"); | ||||
|     const downloadResponse = await httpClient.get(archiveLocation); | ||||
|     // eslint-disable-next-line @typescript-eslint/no-non-null-assertion | ||||
|     const downloadResponse = await httpClient.get(cacheEntry.archiveLocation!); | ||||
|     await pipeResponseToStream(downloadResponse, stream); | ||||
| } | ||||
|  | ||||
| // Reserve Cache | ||||
| export async function reserveCache(key: string): Promise<number> { | ||||
|     const restClient = createRestClient(); | ||||
| export async function saveCache( | ||||
|     key: string, | ||||
|     archivePath: string | ||||
| ): Promise<void> { | ||||
|     const stream = fs.createReadStream(archivePath); | ||||
|  | ||||
|     const reserveCacheRequest: ReserveCacheRequest = { | ||||
|         key | ||||
|     }; | ||||
|     const response = await restClient.create<ReserverCacheResponse>( | ||||
|         "caches", | ||||
|         reserveCacheRequest, | ||||
|         getRequestOptions() | ||||
|     ); | ||||
|     const cacheUrl = getCacheUrl(); | ||||
|     const token = process.env["ACTIONS_RUNTIME_TOKEN"] || ""; | ||||
|     const bearerCredentialHandler = new BearerCredentialHandler(token); | ||||
|  | ||||
|     return response?.result?.cacheId ?? -1; | ||||
| } | ||||
|     const resource = `_apis/artifactcache/cache/${encodeURIComponent(key)}`; | ||||
|     const postUrl = cacheUrl + resource; | ||||
|  | ||||
| function getContentRange(start: number, end: number): string { | ||||
|     // Format: `bytes start-end/filesize` | ||||
|     // start and end are inclusive | ||||
|     // filesize can be * | ||||
|     // For a 200 byte chunk starting at byte 0: | ||||
|     // Content-Range: bytes 0-199/* | ||||
|     return `bytes ${start}-${end}/*`; | ||||
| } | ||||
|     const restClient = new RestClient("actions/cache", undefined, [ | ||||
|         bearerCredentialHandler | ||||
|     ]); | ||||
|  | ||||
| async function uploadChunk( | ||||
|     restClient: RestClient, | ||||
|     resourceUrl: string, | ||||
|     data: NodeJS.ReadableStream, | ||||
|     start: number, | ||||
|     end: number | ||||
| ): Promise<IRestResponse<void>> { | ||||
|     core.debug( | ||||
|         `Uploading chunk of size ${end - | ||||
|             start + | ||||
|             1} bytes at offset ${start} with content range: ${getContentRange( | ||||
|             start, | ||||
|             end | ||||
|         )}` | ||||
|     ); | ||||
|     const requestOptions = getRequestOptions(); | ||||
|     requestOptions.additionalHeaders = { | ||||
|         "Content-Type": "application/octet-stream", | ||||
|         "Content-Range": getContentRange(start, end) | ||||
|         "Content-Type": "application/octet-stream" | ||||
|     }; | ||||
|  | ||||
|     return await restClient.uploadStream<void>( | ||||
|         "PATCH", | ||||
|         resourceUrl, | ||||
|         data, | ||||
|     const response = await restClient.uploadStream<void>( | ||||
|         "POST", | ||||
|         postUrl, | ||||
|         stream, | ||||
|         requestOptions | ||||
|     ); | ||||
| } | ||||
|  | ||||
| async function uploadFile( | ||||
|     restClient: RestClient, | ||||
|     cacheId: number, | ||||
|     archivePath: string | ||||
| ): Promise<void> { | ||||
|     // Upload Chunks | ||||
|     const fileSize = fs.statSync(archivePath).size; | ||||
|     const resourceUrl = getCacheApiUrl() + "caches/" + cacheId.toString(); | ||||
|     const responses: IRestResponse<void>[] = []; | ||||
|     const fd = fs.openSync(archivePath, "r"); | ||||
|  | ||||
|     const concurrency = 4; // # of HTTP requests in parallel | ||||
|     const MAX_CHUNK_SIZE = 32000000; // 32 MB Chunks | ||||
|     core.debug(`Concurrency: ${concurrency} and Chunk Size: ${MAX_CHUNK_SIZE}`); | ||||
|  | ||||
|     const parallelUploads = [...new Array(concurrency).keys()]; | ||||
|     core.debug("Awaiting all uploads"); | ||||
|     let offset = 0; | ||||
|     await Promise.all( | ||||
|         parallelUploads.map(async () => { | ||||
|             while (offset < fileSize) { | ||||
|                 const chunkSize = | ||||
|                     offset + MAX_CHUNK_SIZE > fileSize | ||||
|                         ? fileSize - offset | ||||
|                         : MAX_CHUNK_SIZE; | ||||
|                 const start = offset; | ||||
|                 const end = offset + chunkSize - 1; | ||||
|                 offset += MAX_CHUNK_SIZE; | ||||
|                 const chunk = fs.createReadStream(archivePath, { | ||||
|                     fd, | ||||
|                     start, | ||||
|                     end, | ||||
|                     autoClose: false | ||||
|                 }); | ||||
|                 responses.push( | ||||
|                     await uploadChunk( | ||||
|                         restClient, | ||||
|                         resourceUrl, | ||||
|                         chunk, | ||||
|                         start, | ||||
|                         end | ||||
|                     ) | ||||
|                 ); | ||||
|             } | ||||
|         }) | ||||
|     ); | ||||
|  | ||||
|     fs.closeSync(fd); | ||||
|  | ||||
|     const failedResponse = responses.find( | ||||
|         x => !isSuccessStatusCode(x.statusCode) | ||||
|     ); | ||||
|     if (failedResponse) { | ||||
|         throw new Error( | ||||
|             `Cache service responded with ${failedResponse.statusCode} during chunk upload.` | ||||
|         ); | ||||
|     } | ||||
|  | ||||
|     return; | ||||
| } | ||||
|  | ||||
| async function commitCache( | ||||
|     restClient: RestClient, | ||||
|     cacheId: number, | ||||
|     filesize: number | ||||
| ): Promise<IRestResponse<void>> { | ||||
|     const requestOptions = getRequestOptions(); | ||||
|     const commitCacheRequest: CommitCacheRequest = { size: filesize }; | ||||
|     return await restClient.create( | ||||
|         `caches/${cacheId.toString()}`, | ||||
|         commitCacheRequest, | ||||
|         requestOptions | ||||
|     ); | ||||
| } | ||||
|  | ||||
| export async function saveCache( | ||||
|     cacheId: number, | ||||
|     archivePath: string | ||||
| ): Promise<void> { | ||||
|     const restClient = createRestClient(); | ||||
|  | ||||
|     core.debug("Upload cache"); | ||||
|     await uploadFile(restClient, cacheId, archivePath); | ||||
|  | ||||
|     // Commit Cache | ||||
|     core.debug("Commiting cache"); | ||||
|     const cacheSize = utils.getArchiveFileSize(archivePath); | ||||
|     const commitCacheResponse = await commitCache( | ||||
|         restClient, | ||||
|         cacheId, | ||||
|         cacheSize | ||||
|     ); | ||||
|     if (!isSuccessStatusCode(commitCacheResponse.statusCode)) { | ||||
|         throw new Error( | ||||
|             `Cache service responded with ${commitCacheResponse.statusCode} during commit cache.` | ||||
|         ); | ||||
|     if (response.statusCode !== 200) { | ||||
|         throw new Error(`Cache service responded with ${response.statusCode}`); | ||||
|     } | ||||
|  | ||||
|     core.info("Cache saved successfully"); | ||||
|  | ||||
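The chunked-upload path above splits the archive into 32 MB slices and describes each one with an inclusive `Content-Range` header. A minimal standalone sketch of that arithmetic, assuming the same chunk size as the diff (the 70 MB file size is a hypothetical value, not taken from the source):

```typescript
// Sketch: how uploadFile's offset loop derives inclusive Content-Range values.
const MAX_CHUNK_SIZE = 32000000; // 32 MB, matching the diff

function getContentRange(start: number, end: number): string {
    // start and end are inclusive; total size is unknown, hence "*"
    return `bytes ${start}-${end}/*`;
}

const fileSize = 70000000; // hypothetical 70 MB archive
let offset = 0;
while (offset < fileSize) {
    const chunkSize = Math.min(MAX_CHUNK_SIZE, fileSize - offset);
    const start = offset;
    const end = offset + chunkSize - 1; // inclusive upper bound
    offset += MAX_CHUNK_SIZE;
    console.log(getContentRange(start, end));
}
// Prints:
// bytes 0-31999999/*
// bytes 32000000-63999999/*
// bytes 64000000-69999999/*
```

Note the final chunk shrinks to the remaining bytes, which is why the real code computes `fileSize - offset` before building the range.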
src/contracts.d.ts (13 changed lines, vendored)
| @ -4,16 +4,3 @@ export interface ArtifactCacheEntry { | ||||
|     creationTime?: string; | ||||
|     archiveLocation?: string; | ||||
| } | ||||
|  | ||||
| export interface CommitCacheRequest { | ||||
|     size: number; | ||||
| } | ||||
|  | ||||
| export interface ReserveCacheRequest { | ||||
|     key: string; | ||||
|     version?: string; | ||||
| } | ||||
|  | ||||
| export interface ReserverCacheResponse { | ||||
|     cacheId: number; | ||||
| } | ||||
|  | ||||
| @ -1,9 +1,8 @@ | ||||
| import * as core from "@actions/core"; | ||||
| import { exec } from "@actions/exec"; | ||||
| import * as io from "@actions/io"; | ||||
| import * as path from "path"; | ||||
| import * as cacheHttpClient from "./cacheHttpClient"; | ||||
| import { Events, Inputs, State } from "./constants"; | ||||
| import { extractTar } from "./tar"; | ||||
| import * as utils from "./utils/actionUtils"; | ||||
|  | ||||
| async function run(): Promise<void> { | ||||
| @ -61,7 +60,7 @@ async function run(): Promise<void> { | ||||
|  | ||||
|         try { | ||||
|             const cacheEntry = await cacheHttpClient.getCacheEntry(keys); | ||||
|             if (!cacheEntry || !cacheEntry?.archiveLocation) { | ||||
|             if (!cacheEntry) { | ||||
|                 core.info( | ||||
|                     `Cache not found for input keys: ${keys.join(", ")}.` | ||||
|                 ); | ||||
| @ -78,10 +77,7 @@ async function run(): Promise<void> { | ||||
|             utils.setCacheState(cacheEntry); | ||||
|  | ||||
|             // Download the cache from the cache entry | ||||
|             await cacheHttpClient.downloadCache( | ||||
|                 cacheEntry?.archiveLocation, | ||||
|                 archivePath | ||||
|             ); | ||||
|             await cacheHttpClient.downloadCache(cacheEntry, archivePath); | ||||
|  | ||||
|             const archiveFileSize = utils.getArchiveFileSize(archivePath); | ||||
|             core.info( | ||||
| @ -90,27 +86,7 @@ async function run(): Promise<void> { | ||||
|                 )} MB (${archiveFileSize} B)` | ||||
|             ); | ||||
|  | ||||
|             // Create directory to extract tar into | ||||
|             await io.mkdirP(cachePath); | ||||
|  | ||||
|             // http://man7.org/linux/man-pages/man1/tar.1.html | ||||
|             // tar [-options] <name of the tar archive> [files or directories which to add into archive] | ||||
|             const IS_WINDOWS = process.platform === "win32"; | ||||
|             const args = IS_WINDOWS | ||||
|                 ? [ | ||||
|                       "-xz", | ||||
|                       "--force-local", | ||||
|                       "-f", | ||||
|                       archivePath.replace(/\\/g, "/"), | ||||
|                       "-C", | ||||
|                       cachePath.replace(/\\/g, "/") | ||||
|                   ] | ||||
|                 : ["-xz", "-f", archivePath, "-C", cachePath]; | ||||
|  | ||||
|             const tarPath = await io.which("tar", true); | ||||
|             core.debug(`Tar Path: ${tarPath}`); | ||||
|  | ||||
|             await exec(`"${tarPath}"`, args); | ||||
|             await extractTar(archivePath, cachePath); | ||||
|  | ||||
|             const isExactKeyMatch = utils.isExactKeyMatch( | ||||
|                 primaryKey, | ||||
|  | ||||
src/save.ts (40 changed lines)
| @ -1,9 +1,8 @@ | ||||
| import * as core from "@actions/core"; | ||||
| import { exec } from "@actions/exec"; | ||||
| import * as io from "@actions/io"; | ||||
| import * as path from "path"; | ||||
| import * as cacheHttpClient from "./cacheHttpClient"; | ||||
| import { Events, Inputs, State } from "./constants"; | ||||
| import { createTar } from "./tar"; | ||||
| import * as utils from "./utils/actionUtils"; | ||||
|  | ||||
| async function run(): Promise<void> { | ||||
| @ -35,15 +34,6 @@ async function run(): Promise<void> { | ||||
|             return; | ||||
|         } | ||||
|  | ||||
|         core.debug("Reserving Cache"); | ||||
|         const cacheId = await cacheHttpClient.reserveCache(primaryKey); | ||||
|         if (cacheId < 0) { | ||||
|             core.info( | ||||
|                 `Unable to reserve cache with key ${primaryKey}, another job may be creating this cache.` | ||||
|             ); | ||||
|             return; | ||||
|         } | ||||
|         core.debug(`Cache ID: ${cacheId}`); | ||||
|         const cachePath = utils.resolvePath( | ||||
|             core.getInput(Inputs.Path, { required: true }) | ||||
|         ); | ||||
| @ -55,39 +45,21 @@ async function run(): Promise<void> { | ||||
|         ); | ||||
|         core.debug(`Archive Path: ${archivePath}`); | ||||
|  | ||||
|         // http://man7.org/linux/man-pages/man1/tar.1.html | ||||
|         // tar [-options] <name of the tar archive> [files or directories which to add into archive] | ||||
|         const IS_WINDOWS = process.platform === "win32"; | ||||
|         const args = IS_WINDOWS | ||||
|             ? [ | ||||
|                   "-cz", | ||||
|                   "--force-local", | ||||
|                   "-f", | ||||
|                   archivePath.replace(/\\/g, "/"), | ||||
|                   "-C", | ||||
|                   cachePath.replace(/\\/g, "/"), | ||||
|                   "." | ||||
|               ] | ||||
|             : ["-cz", "-f", archivePath, "-C", cachePath, "."]; | ||||
|         await createTar(archivePath, cachePath); | ||||
|  | ||||
|         const tarPath = await io.which("tar", true); | ||||
|         core.debug(`Tar Path: ${tarPath}`); | ||||
|         await exec(`"${tarPath}"`, args); | ||||
|  | ||||
|         const fileSizeLimit = 2 * 1024 * 1024 * 1024; // 2GB per repo limit | ||||
|         const fileSizeLimit = 400 * 1024 * 1024; // 400MB | ||||
|         const archiveFileSize = utils.getArchiveFileSize(archivePath); | ||||
|         core.debug(`File Size: ${archiveFileSize}`); | ||||
|         if (archiveFileSize > fileSizeLimit) { | ||||
|             utils.logWarning( | ||||
|                 `Cache size of ~${Math.round( | ||||
|                     archiveFileSize / (1024 * 1024 * 1024) | ||||
|                 )} GB (${archiveFileSize} B) is over the 2GB limit, not saving cache.` | ||||
|                     archiveFileSize / (1024 * 1024) | ||||
|                 )} MB (${archiveFileSize} B) is over the 400MB limit, not saving cache.` | ||||
|             ); | ||||
|             return; | ||||
|         } | ||||
|  | ||||
|         core.debug("Saving Cache"); | ||||
|         await cacheHttpClient.saveCache(cacheId, archivePath); | ||||
|         await cacheHttpClient.saveCache(primaryKey, archivePath); | ||||
|     } catch (error) { | ||||
|         utils.logWarning(error.message); | ||||
|     } | ||||
|  | ||||
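For reference, the size gate above in concrete numbers; a quick sketch with a hypothetical archive size (the 450 MB figure is illustrative, not from the diff):

```typescript
// Sketch of the save.ts size check and its warning arithmetic.
const fileSizeLimit = 400 * 1024 * 1024; // 419,430,400 bytes
const archiveFileSize = 450 * 1024 * 1024; // hypothetical: 471,859,200 bytes

if (archiveFileSize > fileSizeLimit) {
    // Mirrors the warning format in the diff: rounds to whole megabytes
    console.log(
        `Cache size of ~${Math.round(archiveFileSize / (1024 * 1024))} MB ` +
            `(${archiveFileSize} B) is over the 400MB limit, not saving cache.`
    );
}
```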
src/tar.ts (49 lines, new file)
| @ -0,0 +1,49 @@ | ||||
| import { exec } from "@actions/exec"; | ||||
| import * as io from "@actions/io"; | ||||
| import { existsSync } from "fs"; | ||||
|  | ||||
| async function getTarPath(): Promise<string> { | ||||
|     // Explicitly use BSD Tar on Windows | ||||
|     const IS_WINDOWS = process.platform === "win32"; | ||||
|     if (IS_WINDOWS) { | ||||
|         const systemTar = `${process.env["windir"]}\\System32\\tar.exe`; | ||||
|         if (existsSync(systemTar)) { | ||||
|             return systemTar; | ||||
|         } | ||||
|     } | ||||
|     return await io.which("tar", true); | ||||
| } | ||||
|  | ||||
| async function execTar(args: string[]): Promise<void> { | ||||
|     try { | ||||
|         const tarPath = await getTarPath(); | ||||
|         const tarExec = process.platform !== "win32" ? `sudo ${tarPath}` : tarPath; | ||||
|         await exec(`"${tarExec}"`, args); | ||||
|     } catch (error) { | ||||
|         const IS_WINDOWS = process.platform === "win32"; | ||||
|         if (IS_WINDOWS) { | ||||
|             throw new Error( | ||||
|                 `Tar failed with error: ${error?.message}. Ensure BSD tar is installed and on the PATH.` | ||||
|             ); | ||||
|         } | ||||
|         throw new Error(`Tar failed with error: ${error?.message}`); | ||||
|     } | ||||
| } | ||||
|  | ||||
| export async function extractTar( | ||||
|     archivePath: string, | ||||
|     targetDirectory: string | ||||
| ): Promise<void> { | ||||
|     // Create directory to extract tar into | ||||
|     await io.mkdirP(targetDirectory); | ||||
|     const args = ["-xz", "-f", archivePath, "-C", targetDirectory]; | ||||
|     await execTar(args); | ||||
| } | ||||
|  | ||||
| export async function createTar( | ||||
|     archivePath: string, | ||||
|     sourceDirectory: string | ||||
| ): Promise<void> { | ||||
|     const args = ["-cz", "-f", archivePath, "-C", sourceDirectory, "."]; | ||||
|     await execTar(args); | ||||
| } | ||||