Mirror of https://github.com/actions/checkout.git (synced 2024-12-27 11:33:05 +07:00)

commit 8ade6aebfa
parent bf32513e49
dist/index.js (vendored): 21 changed lines
@@ -8089,9 +8089,11 @@ var __importStar = (this && this.__importStar) || function (mod) {
 };
 Object.defineProperty(exports, "__esModule", { value: true });
 const assert = __importStar(__webpack_require__(357));
+const core = __importStar(__webpack_require__(470));
 const exec = __importStar(__webpack_require__(986));
 const fs = __importStar(__webpack_require__(747));
 const github = __importStar(__webpack_require__(469));
+const io = __importStar(__webpack_require__(1));
 const path = __importStar(__webpack_require__(622));
 const IS_WINDOWS = process.platform === 'win32';
 function downloadRepository(accessToken, owner, repo, ref, repositoryPath) {
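For reference, the two added requires are the compiled form of namespace imports of @actions/core and @actions/io. The numeric webpack module IDs (470 and 1) are specific to this bundle, so the source-level mapping below is an inference from how core.info and io.rmRF/io.mv are used later in the diff, not something stated in this hunk.

// Assumed source-level counterparts of the two requires added above
import * as core from '@actions/core'
import * as io from '@actions/io'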
@@ -8117,14 +8119,29 @@ function downloadRepository(accessToken, owner, repo, ref, repositoryPath) {
         // }
         const runnerTemp = process.env['RUNNER_TEMP'];
         assert.ok(runnerTemp, 'RUNNER_TEMP not defined');
-        const archiveFile = path.join(runnerTemp, 'checkout.tar.gz');
+        const archiveFile = path.join(runnerTemp, 'checkout-archive.tar.gz');
+        yield io.rmRF(archiveFile);
         yield fs.promises.writeFile(archiveFile, new Buffer(response.data));
         yield exec.exec(`ls -la "${archiveFile}"`, [], {
             cwd: repositoryPath
         });
+        const extractPath = path.join(runnerTemp, 'checkout-archive');
+        yield io.rmRF(extractPath);
+        yield io.mkdirP(extractPath);
         yield exec.exec(`tar -xzf "${archiveFile}"`, [], {
-            cwd: repositoryPath
+            cwd: extractPath
         });
+        // Determine the real directory to copy (ignore extra dir at root of the archive)
+        const archiveFileNames = yield fs.promises.readdir(extractPath);
+        assert.ok(archiveFileNames.length == 1, 'Expected exactly one directory inside archive');
+        const extraDirectoryName = archiveFileNames[0];
+        core.info(`Resolved ${extraDirectoryName}`); // contains the short SHA
+        const tempRepositoryPath = path.join(extractPath, extraDirectoryName);
+        for (const fileName of tempRepositoryPath) {
+            const sourcePath = path.join(tempRepositoryPath, fileName);
+            const targetPath = path.join(repositoryPath, fileName);
+            yield io.mv(sourcePath, targetPath);
+        }
         yield exec.exec(`find .`, [], {
             cwd: repositoryPath
         });
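A side note on the write step that this hunk keeps unchanged: new Buffer(response.data) uses the deprecated Buffer constructor. Below is a minimal sketch of the non-deprecated equivalent, assuming response.data holds the raw tarball bytes returned by the GitHub archive request (for example an ArrayBuffer); the helper name is illustrative and not part of the commit.

import * as fs from 'fs'

// Sketch: Buffer.from avoids the deprecated new Buffer(...) constructor.
// response.data is assumed to be the raw tarball bytes (ArrayBuffer or
// Uint8Array) returned by the GitHub archive request.
async function writeArchive(archiveFile: string, data: ArrayBuffer): Promise<void> {
  await fs.promises.writeFile(archiveFile, Buffer.from(data))
}

The TypeScript hunk that follows is the source this compiled change was generated from.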
@@ -41,14 +41,36 @@ export async function downloadRepository(
   // }
   const runnerTemp = process.env['RUNNER_TEMP'] as string
   assert.ok(runnerTemp, 'RUNNER_TEMP not defined')
-  const archiveFile = path.join(runnerTemp, 'checkout.tar.gz')
+  const archiveFile = path.join(runnerTemp, 'checkout-archive.tar.gz')
+  await io.rmRF(archiveFile)
   await fs.promises.writeFile(archiveFile, new Buffer(response.data))
   await exec.exec(`ls -la "${archiveFile}"`, [], {
     cwd: repositoryPath
   } as ExecOptions)
+
+  const extractPath = path.join(runnerTemp, 'checkout-archive')
+  await io.rmRF(extractPath)
+  await io.mkdirP(extractPath)
   await exec.exec(`tar -xzf "${archiveFile}"`, [], {
-    cwd: repositoryPath
+    cwd: extractPath
   } as ExecOptions)
+
+  // Determine the real directory to copy (ignore extra dir at root of the archive)
+  const archiveFileNames = await fs.promises.readdir(extractPath)
+  assert.ok(
+    archiveFileNames.length == 1,
+    'Expected exactly one directory inside archive'
+  )
+  const extraDirectoryName = archiveFileNames[0]
+  core.info(`Resolved ${extraDirectoryName}`) // contains the short SHA
+  const tempRepositoryPath = path.join(extractPath, extraDirectoryName)
+
+  for (const fileName of tempRepositoryPath) {
+    const sourcePath = path.join(tempRepositoryPath, fileName)
+    const targetPath = path.join(repositoryPath, fileName)
+    await io.mv(sourcePath, targetPath)
+  }
+
   await exec.exec(`find .`, [], {
     cwd: repositoryPath
   } as ExecOptions)
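One detail worth flagging in the new move step: tempRepositoryPath is a string, so for (const fileName of tempRepositoryPath) iterates its characters rather than the entries of the extracted directory. Below is a minimal sketch of the presumably intended loop, reading the directory with readdir before moving each entry; the helper name and the readdir call are assumptions, the rest mirrors the commit.

import * as fs from 'fs'
import * as path from 'path'
import * as io from '@actions/io'

// Sketch only: move every entry of the archive's single root directory into
// repositoryPath. Iterating readdir() results (not the path string) is the
// assumed intent of the loop added in this commit.
async function moveArchiveContents(
  tempRepositoryPath: string,
  repositoryPath: string
): Promise<void> {
  for (const fileName of await fs.promises.readdir(tempRepositoryPath)) {
    const sourcePath = path.join(tempRepositoryPath, fileName)
    const targetPath = path.join(repositoryPath, fileName)
    await io.mv(sourcePath, targetPath)
  }
}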