Mirror of https://github.com/actions/setup-node.git (synced 2025-07-01 12:43:13 +07:00)

Compare commits: v3.5.1...update-tem (67 commits)
Commits (SHA1; author and date columns not captured in this view):

809128768a
2962d3dc11
7d88b53df7
ca2d4e0cdd
c7a93deeac
34050076a5
f8aa08ed8e
e2d34eacc8
ef9c88b169
ea800d4ebc
cb95c398f6
69b2dd252e
e33196f742
c6722d36aa
8170e22e8f
698d50532e
869f4dd0c7
10efafcbcf
7d16907b89
d0d39bda2f
15a2477e08
7598dbcd6e
a9893b0cfb
5b32c9063c
d98fa11138
9d255ef245
e828f9b7f3
a4fcaaf314
10f5623502
fcd18100cc
962678f22c
7c29869aec
ae9f0f7448
3dbcda8bc2
f38519bb96
9227cda3f0
64ed1c7eab
92a57f4a93
99e61d697a
3e8819f8f2
8cd2fb28b8
c406543918
92a07fe466
217387cf3e
2db3663870
bbe2ac79a1
f5ab623822
ca97bf7f80
fe4d514f1a
8151ea11a4
772ffdda26
da188081b1
377c6dae40
b28830cbe2
676975d9aa
d1b197b965
069a4f8926
e77eaaccd3
a69d45adcd
3ae886ede4
41acaa2e85
2349c84f5c
6bc15ab23c
9b8fcdc725
00e1b6691b
16352bb09b
788c6ccbd0
.eslintignore | 6 | Normal file
@@ -0,0 +1,6 @@
# Ignore list
/*

# Do not ignore these folders:
!__tests__/
!src/
.eslintrc.js | 51 | Normal file
@@ -0,0 +1,51 @@
// This is a reusable configuration file copied from https://github.com/actions/reusable-workflows/tree/main/reusable-configurations. Please don't make changes to this file as it's the subject of an automatic update.
module.exports = {
  extends: [
    'eslint:recommended',
    'plugin:@typescript-eslint/recommended',
    'plugin:eslint-plugin-jest/recommended',
    'eslint-config-prettier'
  ],
  parser: '@typescript-eslint/parser',
  plugins: ['@typescript-eslint', 'eslint-plugin-node', 'eslint-plugin-jest'],
  rules: {
    '@typescript-eslint/no-require-imports': 'error',
    '@typescript-eslint/no-non-null-assertion': 'off',
    '@typescript-eslint/no-explicit-any': 'off',
    '@typescript-eslint/no-empty-function': 'off',
    '@typescript-eslint/ban-ts-comment': [
      'error',
      {
        'ts-ignore': 'allow-with-description'
      }
    ],
    'no-console': 'error',
    'yoda': 'error',
    'prefer-const': [
      'error',
      {
        destructuring: 'all'
      }
    ],
    'no-control-regex': 'off',
    'no-constant-condition': ['error', {checkLoops: false}],
    'node/no-extraneous-import': 'error'
  },
  overrides: [
    {
      files: ['**/*{test,spec}.ts'],
      rules: {
        '@typescript-eslint/no-unused-vars': 'off',
        'jest/no-standalone-expect': 'off',
        'jest/no-conditional-expect': 'off',
        'no-console': 'off',

      }
    }
  ],
  env: {
    node: true,
    es6: true,
    'jest/globals': true
  }
};
.gitattributes | 1 | vendored
@@ -1 +1,2 @@
* text=auto eol=lf
.licenses/** -diff linguist-generated=true
.github/ISSUE_TEMPLATE/config.yml | 2 | vendored
@@ -1 +1 @@
blank_issues_enabled: false
blank_issues_enabled: false
.github/workflows/basic-validation.yml | 17 | vendored, Normal file
@@ -0,0 +1,17 @@
name: Basic validation

on:
  pull_request:
    paths-ignore:
      - '**.md'
  push:
    branches:
      - main
      - releases/*
    paths-ignore:
      - '**.md'

jobs:
  call-basic-validation:
    name: Basic validation
    uses: actions/reusable-workflows/.github/workflows/basic-validation.yml@main
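Several workflows in this diff are rewritten as thin callers of `actions/reusable-workflows`. For reference, a caller job only needs a `uses:` line pointing at the reusable workflow file; a minimal sketch (the repository path and `@main` ref come from the diff above, while the workflow and job names here are illustrative, and any `with:`/`secrets:` inputs would depend on what the reusable workflow declares):

```yaml
name: Example caller workflow   # illustrative name

on:
  pull_request:

jobs:
  call-reusable:
    # A caller job has no runs-on or steps; it delegates entirely to the referenced workflow.
    uses: actions/reusable-workflows/.github/workflows/basic-validation.yml@main
```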
.github/workflows/build-test.yml | 30 | vendored
@@ -1,30 +0,0 @@
name: build-test

on:
  pull_request:
    paths-ignore:
      - '**.md'
  push:
    branches:
      - main
      - releases/*
    paths-ignore:
      - '**.md'

jobs:
  build:
    runs-on: ${{ matrix.os }}
    strategy:
      matrix:
        os: [ubuntu-latest, windows-latest, macos-latest]
    steps:
      - uses: actions/checkout@v3
      - name: Setup Node 16.x
        uses: actions/setup-node@v3
        with:
          node-version: 16.x
          cache: npm
      - run: npm ci
      - run: npm run build
      - run: npm run format-check
      - run: npm test
.github/workflows/check-dist.yml | 43 | vendored
@@ -1,9 +1,4 @@
# `dist/index.js` is a special file in Actions.
# When you reference an action with `uses:` in a workflow,
# `index.js` is the code that will run.
# For our project, we generate this file through a build process from other source files.
# We need to make sure the checked-in `index.js` actually matches what we expect it to be.
name: Check dist/
name: Check dist

on:
  push:
@@ -17,36 +12,6 @@ on:
  workflow_dispatch:

jobs:
  check-dist:
    runs-on: ubuntu-latest

    steps:
      - uses: actions/checkout@v3

      - name: Setup Node 16.x
        uses: actions/setup-node@v3
        with:
          node-version: 16.x
          cache: npm

      - name: Install dependencies
        run: npm ci

      - name: Rebuild the dist/ directory
        run: npm run build

      - name: Compare the expected and actual dist/ directories
        run: |
          if [ "$(git diff --ignore-space-at-eol dist/ | wc -l)" -gt "0" ]; then
            echo "Detected uncommitted changes after build. See status below:"
            git diff
            exit 1
          fi
        id: diff

      # If index.js was different than expected, upload the expected version as an artifact
      - uses: actions/upload-artifact@v3
        if: ${{ failure() && steps.diff.conclusion == 'failure' }}
        with:
          name: dist
          path: dist/
  call-check-dist:
    name: Check dist/
    uses: actions/reusable-workflows/.github/workflows/check-dist.yml@main
.github/workflows/codeql-analysis.yml | 14 | vendored, Normal file
@@ -0,0 +1,14 @@
name: CodeQL analysis

on:
  push:
    branches: [main]
  pull_request:
    branches: [main]
  schedule:
    - cron: '0 3 * * 0'

jobs:
  call-codeQL-analysis:
    name: CodeQL analysis
    uses: actions/reusable-workflows/.github/workflows/codeql-analysis.yml@main
.github/workflows/e2e-cache.yml | 111 | vendored
@@ -75,7 +75,7 @@ jobs:
      fail-fast: false
      matrix:
        os: [ubuntu-latest, windows-latest, macos-latest]
        node-version: [12, 14, 16]
        node-version: [14, 16]
    steps:
      - uses: actions/checkout@v3
      - name: Yarn version
@@ -134,3 +134,112 @@ jobs:
      - name: Verify node and yarn
        run: __tests__/verify-node.sh "${{ matrix.node-version }}"
        shell: bash

  yarn-subprojects:
    name: Test yarn subprojects
    strategy:
      matrix:
        node-version: [12, 14, 16]
    runs-on: ubuntu-latest

    steps:
      - uses: actions/checkout@v3

      - name: prepare sub-projects
        run: __tests__/prepare-yarn-subprojects.sh yarn1

      # expect
      # - no errors
      # - log
      # ##[debug]Cache Paths:
      # ##[debug]["sub2/.yarn/cache","sub3/.yarn/cache","../../../.cache/yarn/v6"]
      - name: Setup Node
        uses: ./
        with:
          node-version: ${{ matrix.node-version }}
          cache: 'yarn'
          cache-dependency-path: |
            **/*.lock
            yarn.lock

  yarn-subprojects-berry-local:
    name: Test yarn subprojects all locally managed
    strategy:
      matrix:
        node-version: [12, 14, 16]
    runs-on: ubuntu-latest

    steps:
      - uses: actions/checkout@v3

      - name: prepare sub-projects
        run: __tests__/prepare-yarn-subprojects.sh keepcache keepcache

      # expect
      # - no errors
      # - log
      # ##[info]All dependencies are managed locally by yarn3, the previous cache can be used
      # ##[debug]["node-cache-Linux-yarn-401024703386272f1a950c9f014cbb1bb79a7a5b6e1fb00e8b90d06734af41ee","node-cache-Linux-yarn"]
      - name: Setup Node
        uses: ./
        with:
          node-version: ${{ matrix.node-version }}
          cache: 'yarn'
          cache-dependency-path: |
            sub2/*.lock
            sub3/*.lock

  yarn-subprojects-berry-global:
    name: Test yarn subprojects some locally managed
    strategy:
      matrix:
        node-version: [12, 14, 16]
    runs-on: ubuntu-latest

    steps:
      - uses: actions/checkout@v3

      - name: prepare sub-projects
        run: __tests__/prepare-yarn-subprojects.sh global

      # expect
      # - no errors
      # - log must
      # ##[debug]"/home/runner/work/setup-node-test/setup-node-test/sub2" dependencies are managed by yarn 3 locally
      # ##[debug]"/home/runner/work/setup-node-test/setup-node-test/sub3" dependencies are not managed by yarn 3 locally
      - name: Setup Node
        uses: ./
        with:
          node-version: ${{ matrix.node-version }}
          cache: 'yarn'
          cache-dependency-path: |
            sub2/*.lock
            sub3/*.lock

  yarn-subprojects-berry-git:
    name: Test yarn subprojects managed by git
    strategy:
      matrix:
        node-version: [12, 14, 16]
    runs-on: ubuntu-latest

    steps:
      - uses: actions/checkout@v3

      - name: prepare sub-projects
        run: /bin/bash __tests__/prepare-yarn-subprojects.sh keepcache

      # expect
      # - no errors
      # - log
      # [debug]"/home/runner/work/setup-node-test/setup-node-test/sub2" has .yarn/cache - dependencies are kept in the repository
      # [debug]"/home/runner/work/setup-node-test/setup-node-test/sub3" has .yarn/cache - dependencies are kept in the repository
      # [debug]["node-cache-Linux-yarn-401024703386272f1a950c9f014cbb1bb79a7a5b6e1fb00e8b90d06734af41ee"]
      - name: Setup Node
        uses: ./
        with:
          node-version: ${{ matrix.node-version }}
          cache: 'yarn'
          cache-dependency-path: |
            sub2/*.lock
            sub3/*.lock
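The new end-to-end jobs above exercise yarn caching across sub-projects by passing several lock-file globs to `cache-dependency-path`. Outside of this repository's self-test (which uses `uses: ./`), the equivalent consumer configuration would look roughly like this sketch; the lock-file paths are examples, not prescribed names:

```yaml
steps:
  - uses: actions/checkout@v3
  - uses: actions/setup-node@v3
    with:
      node-version: 16
      cache: 'yarn'
      # All listed files (globs allowed) are hashed into a single cache key.
      cache-dependency-path: |
        sub2/*.lock
        sub3/*.lock
```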
.github/workflows/licensed.yml | 17 | vendored
@@ -7,18 +7,9 @@ on:
  pull_request:
    branches:
      - main
  workflow_dispatch:

jobs:
  test:
    runs-on: ubuntu-latest
    name: Check licenses
    steps:
      - uses: actions/checkout@v3
      - run: npm ci
      - name: Install licensed
        run: |
          cd $RUNNER_TEMP
          curl -Lfs -o licensed.tar.gz https://github.com/github/licensed/releases/download/3.4.4/licensed-3.4.4-linux-x64.tar.gz
          sudo tar -xzf licensed.tar.gz
          sudo mv licensed /usr/local/bin/licensed
      - run: licensed status
  call-licensed:
    name: Licensed
    uses: actions/reusable-workflows/.github/workflows/licensed.yml@main
.github/workflows/proxy.yml | 2 | vendored
@@ -19,7 +19,7 @@ jobs:
      options: --dns 127.0.0.1
    services:
      squid-proxy:
        image: datadog/squid:latest
        image: ubuntu/squid:latest
        ports:
          - 3128:3128
    env:
.github/workflows/release-new-action-version.yml | 11 | vendored
@@ -1,4 +1,5 @@
name: Release new action version

on:
  release:
    types: [released]
@@ -20,8 +21,8 @@ jobs:
    name: releaseNewActionVersion
    runs-on: ubuntu-latest
    steps:
      - name: Update the ${{ env.TAG_NAME }} tag
        uses: actions/publish-action@v0.1.0
        with:
          source-tag: ${{ env.TAG_NAME }}
          slack-webhook: ${{ secrets.SLACK_WEBHOOK }}
      - name: Update the ${{ env.TAG_NAME }} tag
        uses: actions/publish-action@v0.2.2
        with:
          source-tag: ${{ env.TAG_NAME }}
          slack-webhook: ${{ secrets.SLACK_WEBHOOK }}
.github/workflows/update-config-files.yml | 11 | vendored, Normal file
@@ -0,0 +1,11 @@
name: Update configuration files

on:
  schedule:
    - cron: '0 3 * * 0'
  workflow_dispatch:

jobs:
  call-update-configuration-files:
    name: Update configuration files
    uses: actions/reusable-workflows/.github/workflows/update-config-files.yml@main
.github/workflows/versions.yml | 75 | vendored
@@ -51,6 +51,72 @@ jobs:
          __tests__/verify-node.sh "${BASH_REMATCH[1]}"
        shell: bash

  v8-canary-syntax:
    runs-on: ${{ matrix.os }}
    strategy:
      fail-fast: false
      matrix:
        os: [ubuntu-latest, windows-latest, macos-latest]
        node-version:
          [
            '20-v8-canary',
            '20.0.0-v8-canary',
            '20.0.0-v8-canary20221101e50e45c9f8'
          ]
    steps:
      - uses: actions/checkout@v3
      - name: Setup Node
        uses: ./
        with:
          node-version: ${{ matrix.node-version }}
      - name: Verify node and npm
        run: |
          canaryVersion="${{ matrix.node-version }}"
          majorVersion=$(echo $canaryVersion | cut -d- -f1)
          __tests__/verify-node.sh "$majorVersion"
        shell: bash

  nightly-syntax:
    runs-on: ${{ matrix.os }}
    strategy:
      fail-fast: false
      matrix:
        os: [ubuntu-latest, windows-latest, macos-latest]
        node-version:
          [16.0.0-nightly20210420a0261d231c, 17-nightly, 18.0.0-nightly]
    steps:
      - uses: actions/checkout@v3
      - name: Setup Node
        uses: ./
        with:
          node-version: ${{ matrix.node-version }}
      - name: Verify node and npm
        run: |
          nightlyVersion="${{ matrix.node-version }}"
          majorVersion=$(echo $nightlyVersion | cut -d- -f1)
          __tests__/verify-node.sh "$majorVersion"
        shell: bash

  rc-syntax:
    runs-on: ${{ matrix.os }}
    strategy:
      fail-fast: false
      matrix:
        os: [ubuntu-latest, windows-latest, macos-latest]
        node-version: [16.0.0-rc.1, 18.0.0-rc.2, 19.0.0-rc.0]
    steps:
      - uses: actions/checkout@v3
      - name: Setup Node
        uses: ./
        with:
          node-version: ${{ matrix.node-version }}
      - name: Verify node and npm
        run: |
          rcVersion="${{ matrix.node-version }}"
          majorVersion=$(echo $rcVersion | cut -d- -f1)
          __tests__/verify-node.sh "$majorVersion"
        shell: bash

  manifest:
    runs-on: ${{ matrix.os }}
    strategy:
@@ -92,7 +158,8 @@ jobs:
      fail-fast: false
      matrix:
        os: [ubuntu-latest, windows-latest, macos-latest]
        node-version-file: [.nvmrc, .tool-versions, package.json]
        node-version-file:
          [.nvmrc, .tool-versions, .tool-versions-node, package.json]
    steps:
      - uses: actions/checkout@v3
      - name: Remove volta from package.json
@@ -110,7 +177,7 @@ jobs:
    strategy:
      fail-fast: false
      matrix:
        os: [ ubuntu-latest, windows-latest, macos-latest ]
        os: [ubuntu-latest, windows-latest, macos-latest]
    steps:
      - uses: actions/checkout@v3
      - name: Setup node from node version file
@@ -178,7 +245,7 @@ jobs:
      - name: Get node version
        run: |
          latestNodeVersion=$(curl https://nodejs.org/dist/index.json | jq -r '. [0].version')
          echo "::set-output name=LATEST_NODE_VERSION::$latestNodeVersion"
          echo "LATEST_NODE_VERSION=$latestNodeVersion" >> $GITHUB_OUTPUT
        id: version
        shell: bash
      - uses: actions/checkout@v3
@@ -189,7 +256,7 @@ jobs:
      - name: Retrieve version after install
        run: |
          updatedVersion=$(echo $(node --version))
          echo "::set-output name=NODE_VERSION_UPDATED::$updatedVersion"
          echo "NODE_VERSION_UPDATED=$updatedVersion" >> $GITHUB_OUTPUT
        id: updatedVersion
        shell: bash
      - name: Compare versions
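The last two hunks above replace the deprecated `::set-output` workflow command with writes to the `$GITHUB_OUTPUT` file. The general migration pattern, sketched here with an illustrative step id and output name, is to append `NAME=value` to `$GITHUB_OUTPUT` in the producing step and read it back through `steps.<id>.outputs.<NAME>`:

```yaml
steps:
  - name: Get node version
    id: version
    shell: bash
    # Old form (deprecated): echo "::set-output name=LATEST_NODE_VERSION::$latestNodeVersion"
    run: |
      latestNodeVersion=$(curl https://nodejs.org/dist/index.json | jq -r '.[0].version')
      echo "LATEST_NODE_VERSION=$latestNodeVersion" >> "$GITHUB_OUTPUT"
  - name: Use the output
    run: echo "Latest Node.js is ${{ steps.version.outputs.LATEST_NODE_VERSION }}"
```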
.licenses/npm/@actions/cache.dep.yml | 2 | generated
@@ -1,6 +1,6 @@
---
name: "@actions/cache"
version: 3.0.4
version: 3.2.1
type: npm
summary: Actions cache lib
homepage: https://github.com/actions/toolkit/tree/main/packages/cache
.licenses/npm/@azure/abort-controller.dep.yml | 4 | generated
@@ -1,9 +1,9 @@
---
name: "@azure/abort-controller"
version: 1.0.4
version: 1.1.0
type: npm
summary: Microsoft Azure SDK for JavaScript - Aborter
homepage: https://github.com/Azure/azure-sdk-for-js/tree/master/sdk/core/abort-controller/README.md
homepage: https://github.com/Azure/azure-sdk-for-js/tree/main/sdk/core/abort-controller/README.md
license: mit
licenses:
- sources: LICENSE
.licenses/npm/@azure/core-http.dep.yml | 2 | generated
@@ -1,6 +1,6 @@
---
name: "@azure/core-http"
version: 2.2.4
version: 3.0.1
type: npm
summary: Isomorphic client Runtime for Typescript/node.js/browser javascript client
  libraries generated using AutoRest
(file name not captured in this view)
@@ -1,16 +1,16 @@
---
name: uuid
version: 3.3.2
name: "@azure/core-util"
version: 1.3.0
type: npm
summary: RFC4122 (v1, v4, and v5) UUIDs
homepage: https://github.com/kelektiv/node-uuid#readme
summary: Core library for shared utility methods
homepage: https://github.com/Azure/azure-sdk-for-js/blob/main/sdk/core/core-util/
license: mit
licenses:
- sources: LICENSE.md
- sources: LICENSE
  text: |
    The MIT License (MIT)

    Copyright (c) 2010-2016 Robert Kieffer and other contributors
    Copyright (c) 2020 Microsoft

    Permission is hereby granted, free of charge, to any person obtaining a copy
    of this software and associated documentation files (the "Software"), to deal
@@ -29,11 +29,4 @@ licenses:
    LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
    OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
    SOFTWARE.
notices:
- sources: AUTHORS
  text: |-
    Robert Kieffer <robert@broofa.com>
    Christoph Tavan <dev@tavan.de>
    AJ ONeal <coolaj86@gmail.com>
    Vincent Voyer <vincent@zeroload.net>
    Roman Shtylman <shtylman@gmail.com>
notices: []
.licenses/npm/@azure/ms-rest-js.dep.yml | 2 | generated
@@ -1,6 +1,6 @@
---
name: "@azure/ms-rest-js"
version: 2.6.1
version: 2.7.0
type: npm
summary: Isomorphic client Runtime for Typescript/node.js/browser javascript client
  libraries generated using AutoRest
.licenses/npm/@azure/storage-blob.dep.yml | 2 | generated
@@ -1,6 +1,6 @@
---
name: "@azure/storage-blob"
version: 12.9.0
version: 12.13.0
type: npm
summary: Microsoft Azure Storage SDK for JavaScript - Blob
homepage: https://github.com/Azure/azure-sdk-for-js/blob/main/sdk/storage/storage-blob/
.licenses/npm/@types/node-fetch.dep.yml | 2 | generated
@@ -1,6 +1,6 @@
---
name: "@types/node-fetch"
version: 2.6.1
version: 2.6.3
type: npm
summary: TypeScript definitions for node-fetch
homepage: https://github.com/DefinitelyTyped/DefinitelyTyped/tree/master/types/node-fetch
.licenses/npm/ip-regex.dep.yml | 34 | generated
@@ -1,34 +0,0 @@
---
name: ip-regex
version: 2.1.0
type: npm
summary: Regular expression for matching IP addresses (IPv4 & IPv6)
homepage: https://github.com/sindresorhus/ip-regex#readme
license: mit
licenses:
- sources: license
  text: |
    The MIT License (MIT)

    Copyright (c) Sindre Sorhus <sindresorhus@gmail.com> (sindresorhus.com)

    Permission is hereby granted, free of charge, to any person obtaining a copy
    of this software and associated documentation files (the "Software"), to deal
    in the Software without restriction, including without limitation the rights
    to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
    copies of the Software, and to permit persons to whom the Software is
    furnished to do so, subject to the following conditions:

    The above copyright notice and this permission notice shall be included in
    all copies or substantial portions of the Software.

    THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
    IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
    FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
    AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
    LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
    OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
    THE SOFTWARE.
- sources: readme.md
  text: MIT © [Sindre Sorhus](https://sindresorhus.com)
notices: []
.licenses/npm/minimatch.dep.yml | 2 | generated
@@ -1,6 +1,6 @@
---
name: minimatch
version: 3.0.4
version: 3.1.2
type: npm
summary: a glob matcher in javascript
homepage: https://github.com/isaacs/minimatch#readme
.licenses/npm/psl.dep.yml | 43 | generated
@@ -1,43 +0,0 @@
---
name: psl
version: 1.8.0
type: npm
summary: Domain name parser based on the Public Suffix List
homepage: https://github.com/lupomontero/psl#readme
license: mit
licenses:
- sources: LICENSE
  text: |
    The MIT License (MIT)

    Copyright (c) 2017 Lupo Montero lupomontero@gmail.com

    Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions:

    The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software.

    THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
- sources: README.md
  text: |-
    The MIT License (MIT)

    Copyright (c) 2017 Lupo Montero <lupomontero@gmail.com>

    Permission is hereby granted, free of charge, to any person obtaining a copy
    of this software and associated documentation files (the "Software"), to deal
    in the Software without restriction, including without limitation the rights
    to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
    copies of the Software, and to permit persons to whom the Software is
    furnished to do so, subject to the following conditions:

    The above copyright notice and this permission notice shall be included in
    all copies or substantial portions of the Software.

    THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
    IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
    FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
    AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
    LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
    OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
    THE SOFTWARE.
notices: []
.licenses/npm/punycode.dep.yml | 34 | generated
@@ -1,34 +0,0 @@
---
name: punycode
version: 2.1.1
type: npm
summary: A robust Punycode converter that fully complies to RFC 3492 and RFC 5891,
  and works on nearly all JavaScript platforms.
homepage: https://mths.be/punycode
license: mit
licenses:
- sources: LICENSE-MIT.txt
  text: |
    Copyright Mathias Bynens <https://mathiasbynens.be/>

    Permission is hereby granted, free of charge, to any person obtaining
    a copy of this software and associated documentation files (the
    "Software"), to deal in the Software without restriction, including
    without limitation the rights to use, copy, modify, merge, publish,
    distribute, sublicense, and/or sell copies of the Software, and to
    permit persons to whom the Software is furnished to do so, subject to
    the following conditions:

    The above copyright notice and this permission notice shall be
    included in all copies or substantial portions of the Software.

    THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
    EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
    MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
    NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
    LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
    OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
    WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
- sources: README.md
  text: Punycode.js is available under the [MIT](https://mths.be/mit) license.
notices: []
(file name not captured in this view)
@@ -1,9 +1,9 @@
---
name: semver
version: 6.1.2
version: 6.3.1
type: npm
summary: The semantic version parser used by npm.
homepage: https://github.com/npm/node-semver#readme
homepage:
license: isc
licenses:
- sources: LICENSE
.licenses/npm/tough-cookie-3.0.1.dep.yml | 23 | generated
@@ -1,23 +0,0 @@
---
name: tough-cookie
version: 3.0.1
type: npm
summary: RFC6265 Cookies and Cookie Jar for node.js
homepage: https://github.com/salesforce/tough-cookie
license: bsd-3-clause
licenses:
- sources: LICENSE
  text: |
    Copyright (c) 2015, Salesforce.com, Inc.
    All rights reserved.

    Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are met:

    1. Redistributions of source code must retain the above copyright notice, this list of conditions and the following disclaimer.

    2. Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following disclaimer in the documentation and/or other materials provided with the distribution.

    3. Neither the name of Salesforce.com nor the names of its contributors may be used to endorse or promote products derived from this software without specific prior written permission.

    THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
notices: []
.licenses/npm/tough-cookie-4.0.0.dep.yml | 23 | generated
@@ -1,23 +0,0 @@
---
name: tough-cookie
version: 4.0.0
type: npm
summary: RFC6265 Cookies and Cookie Jar for node.js
homepage: https://github.com/salesforce/tough-cookie
license: bsd-3-clause
licenses:
- sources: LICENSE
  text: |
    Copyright (c) 2015, Salesforce.com, Inc.
    All rights reserved.

    Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are met:

    1. Redistributions of source code must retain the above copyright notice, this list of conditions and the following disclaimer.

    2. Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following disclaimer in the documentation and/or other materials provided with the distribution.

    3. Neither the name of Salesforce.com nor the names of its contributors may be used to endorse or promote products derived from this software without specific prior written permission.

    THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
notices: []
.licenses/npm/tslib-2.5.0.dep.yml | 35 | generated, Normal file
@@ -0,0 +1,35 @@
---
name: tslib
version: 2.5.0
type: npm
summary: Runtime library for TypeScript helper functions
homepage: https://www.typescriptlang.org/
license: 0bsd
licenses:
- sources: LICENSE.txt
  text: |-
    Copyright (c) Microsoft Corporation.

    Permission to use, copy, modify, and/or distribute this software for any
    purpose with or without fee is hereby granted.

    THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES WITH
    REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY
    AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY SPECIAL, DIRECT,
    INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM
    LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR
    OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR
    PERFORMANCE OF THIS SOFTWARE.
notices:
- sources: CopyrightNotice.txt
  text: "/******************************************************************************\r\nCopyright
    (c) Microsoft Corporation.\r\n\r\nPermission to use, copy, modify, and/or distribute
    this software for any\r\npurpose with or without fee is hereby granted.\r\n\r\nTHE
    SOFTWARE IS PROVIDED \"AS IS\" AND THE AUTHOR DISCLAIMS ALL WARRANTIES WITH\r\nREGARD
    TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY\r\nAND FITNESS.
    IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY SPECIAL, DIRECT,\r\nINDIRECT, OR
    CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM\r\nLOSS OF USE,
    DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR\r\nOTHER TORTIOUS
    ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR\r\nPERFORMANCE OF THIS
    SOFTWARE.\r\n*****************************************************************************
    */"
.licenses/npm/universalify.dep.yml | 33 | generated
@@ -1,33 +0,0 @@
---
name: universalify
version: 0.1.2
type: npm
summary: Make a callback- or promise-based function support both promises and callbacks.
homepage: https://github.com/RyanZim/universalify#readme
license: mit
licenses:
- sources: LICENSE
  text: |
    (The MIT License)

    Copyright (c) 2017, Ryan Zimmerman <opensrc@ryanzim.com>

    Permission is hereby granted, free of charge, to any person obtaining a copy of
    this software and associated documentation files (the 'Software'), to deal in
    the Software without restriction, including without limitation the rights to
    use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of
    the Software, and to permit persons to whom the Software is furnished to do so,
    subject to the following conditions:

    The above copyright notice and this permission notice shall be included in all
    copies or substantial portions of the Software.

    THE SOFTWARE IS PROVIDED 'AS IS', WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
    IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS
    FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR
    COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER
    IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN
    CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
- sources: README.md
  text: MIT
notices: []
.licenses/npm/uuid-9.0.0.dep.yml | 20 | generated, Normal file
@@ -0,0 +1,20 @@
---
name: uuid
version: 9.0.0
type: npm
summary: RFC4122 (v1, v4, and v5) UUIDs
homepage:
license: mit
licenses:
- sources: LICENSE.md
  text: |
    The MIT License (MIT)

    Copyright (c) 2010-2020 Robert Kieffer and other contributors

    Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions:

    The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software.

    THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
notices: []
.licenses/npm/xml2js.dep.yml | 2 | generated
@@ -1,6 +1,6 @@
---
name: xml2js
version: 0.4.23
version: 0.5.0
type: npm
summary: Simple XML to JavaScript object converter.
homepage: https://github.com/Leonidas-from-XIV/node-xml2js
.prettierignore | 7 | Normal file
@@ -0,0 +1,7 @@
# Ignore list
/*

# Do not ignore these folders:
!__tests__/
!.github/
!src/
.prettierrc.js | 11 | Normal file
@@ -0,0 +1,11 @@
// This is a reusable configuration file copied from https://github.com/actions/reusable-workflows/tree/main/reusable-configurations. Please don't make changes to this file as it's the subject of an automatic update.
module.exports = {
  printWidth: 80,
  tabWidth: 2,
  useTabs: false,
  semi: true,
  singleQuote: true,
  trailingComma: 'none',
  bracketSpacing: false,
  arrowParens: 'avoid'
};
(file name not captured in this view)
@@ -1,11 +0,0 @@
{
  "printWidth": 80,
  "tabWidth": 2,
  "useTabs": false,
  "semi": true,
  "singleQuote": true,
  "trailingComma": "none",
  "bracketSpacing": false,
  "arrowParens": "avoid",
  "parser": "typescript"
}
(file name not captured in this view)
@@ -1 +1 @@
* @actions/actions-service
* @actions/setup-actions-team
README.md | 104
@@ -1,7 +1,8 @@
# setup-node

[![build-test](https://github.com/actions/setup-node/actions/workflows/build-test.yml/badge.svg)](https://github.com/actions/setup-node/actions/workflows/build-test.yml)
[![Basic validation](https://github.com/actions/setup-node/actions/workflows/basic-validation.yml/badge.svg)](https://github.com/actions/setup-node/actions/workflows/basic-validation.yml)
[![versions](https://github.com/actions/setup-node/actions/workflows/versions.yml/badge.svg)](https://github.com/actions/setup-node/actions/workflows/versions.yml)
[![e2e-cache](https://github.com/actions/setup-node/actions/workflows/e2e-cache.yml/badge.svg)](https://github.com/actions/setup-node/actions/workflows/e2e-cache.yml)
[![proxy](https://github.com/actions/setup-node/actions/workflows/proxy.yml/badge.svg)](https://github.com/actions/setup-node/actions/workflows/proxy.yml)

This action provides the following functionality for GitHub Actions users:
@@ -15,6 +16,69 @@ This action provides the following functionality for GitHub Actions users:

See [action.yml](action.yml)

<!-- start usage -->
```yaml
- uses: actions/setup-node@v3
  with:
    # Version Spec of the version to use in SemVer notation.
    # It also emits such aliases as lts, latest, nightly and canary builds
    # Examples: 12.x, 10.15.1, >=10.15.0, lts/Hydrogen, 16-nightly, latest, node
    node-version: ''

    # File containing the version Spec of the version to use. Examples: .nvmrc, .node-version, .tool-versions.
    # If node-version and node-version-file are both provided the action will use version from node-version.
    node-version-file: ''

    # Set this option if you want the action to check for the latest available version
    # that satisfies the version spec.
    # It will only get affect for lts Nodejs versions (12.x, >=10.15.0, lts/Hydrogen).
    # Default: false
    check-latest: false

    # Target architecture for Node to use. Examples: x86, x64. Will use system architecture by default.
    # Default: ''. The action use system architecture by default
    architecture: ''

    # Used to pull node distributions from https://github.com/actions/node-versions.
    # Since there's a default, this is typically not supplied by the user.
    # When running this action on github.com, the default value is sufficient.
    # When running on GHES, you can pass a personal access token for github.com if you are experiencing rate limiting.
    #
    # We recommend using a service account with the least permissions necessary. Also
    # when generating a new PAT, select the least scopes necessary.
    #
    # [Learn more about creating and using encrypted secrets](https://help.github.com/en/actions/automating-your-workflow-with-github-actions/creating-and-using-encrypted-secrets)
    #
    # Default: ${{ github.server_url == 'https://github.com' && github.token || '' }}
    token: ''

    # Used to specify a package manager for caching in the default directory. Supported values: npm, yarn, pnpm.
    # Package manager should be pre-installed
    # Default: ''
    cache: ''

    # Used to specify the path to a dependency file: package-lock.json, yarn.lock, etc.
    # It will generate hash from the target file for primary key. It works only If cache is specified.
    # Supports wildcards or a list of file names for caching multiple dependencies.
    # Default: ''
    cache-dependency-path: ''

    # Optional registry to set up for auth. Will set the registry in a project level .npmrc and .yarnrc file,
    # and set up auth to read in from env.NODE_AUTH_TOKEN.
    # Default: ''
    registry-url: ''

    # Optional scope for authenticating against scoped registries.
    # Will fall back to the repository owner when using the GitHub Packages registry (https://npm.pkg.github.com/).
    # Default: ''
    scope: ''

    # Set always-auth option in npmrc file.
    # Default: ''
    always-auth: ''
```
<!-- end usage -->

**Basic:**

```yaml
@@ -22,7 +86,7 @@ steps:
- uses: actions/checkout@v3
- uses: actions/setup-node@v3
  with:
    node-version: 16
    node-version: 18
- run: npm ci
- run: npm test
```
@@ -35,7 +99,7 @@ For information regarding locally cached versions of Node.js on GitHub hosted ru

### Supported version syntax

The `node-version` input supports the Semantic Versioning Specification, for more detailed examples please refer to the [documentation](https://github.com/npm/node-semver).
The `node-version` input supports the Semantic Versioning Specification, for more detailed examples please refer to [the semver package documentation](https://github.com/npm/node-semver).

Examples:

@@ -111,16 +175,34 @@ jobs:
      - run: npm test
```

## Using `setup-node` on GHES

`setup-node` comes pre-installed on the appliance with GHES if Actions is enabled. When dynamically downloading Nodejs distributions, `setup-node` downloads distributions from [`actions/node-versions`](https://github.com/actions/node-versions) on github.com (outside of the appliance). These calls to `actions/node-versions` are made via unauthenticated requests, which are limited to [60 requests per hour per IP](https://docs.github.com/en/rest/overview/resources-in-the-rest-api#rate-limiting). If more requests are made within the time frame, then you will start to see rate-limit errors during downloading that looks like: `##[error]API rate limit exceeded for...`. After that error the action will try to download versions directly from the official site, but it also can have rate limit so it's better to put token.

To get a higher rate limit, you can [generate a personal access token on github.com](https://github.com/settings/tokens/new) and pass it as the `token` input for the action:

```yaml
uses: actions/setup-node@v3
with:
  token: ${{ secrets.GH_DOTCOM_TOKEN }}
  node-version: 16
```

If the runner is not able to access github.com, any Nodejs versions requested during a workflow run must come from the runner's tool cache. See "[Setting up the tool cache on self-hosted runners without internet access](https://docs.github.com/en/enterprise-server@3.2/admin/github-actions/managing-access-to-actions-from-githubcom/setting-up-the-tool-cache-on-self-hosted-runners-without-internet-access)" for more information.

## Advanced usage

1. [Check latest version](docs/advanced-usage.md#check-latest-version)
2. [Using a node version file](docs/advanced-usage.md#node-version-file)
3. [Using different architectures](docs/advanced-usage.md#architecture)
4. [Caching packages data](docs/advanced-usage.md#caching-packages-data)
5. [Using multiple operating systems and architectures](docs/advanced-usage.md#multiple-operating-systems-and-architectures)
6. [Publishing to npmjs and GPR with npm](docs/advanced-usage.md#publish-to-npmjs-and-gpr-with-npm)
7. [Publishing to npmjs and GPR with yarn](docs/advanced-usage.md#publish-to-npmjs-and-gpr-with-yarn)
8. [Using private packages](docs/advanced-usage.md#use-private-packages)
- [Check latest version](docs/advanced-usage.md#check-latest-version)
- [Using a node version file](docs/advanced-usage.md#node-version-file)
- [Using different architectures](docs/advanced-usage.md#architecture)
- [Using v8 canary versions](docs/advanced-usage.md#v8-canary-versions)
- [Using nightly versions](docs/advanced-usage.md#nightly-versions)
- [Using rc versions](docs/advanced-usage.md#rc-versions)
- [Caching packages data](docs/advanced-usage.md#caching-packages-data)
- [Using multiple operating systems and architectures](docs/advanced-usage.md#multiple-operating-systems-and-architectures)
- [Publishing to npmjs and GPR with npm](docs/advanced-usage.md#publish-to-npmjs-and-gpr-with-npm)
- [Publishing to npmjs and GPR with yarn](docs/advanced-usage.md#publish-to-npmjs-and-gpr-with-yarn)
- [Using private packages](docs/advanced-usage.md#use-private-packages)

## License

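Putting the README inputs above together, a typical workflow that reads the Node.js version from a file and caches npm dependencies might look like the following sketch; the `.nvmrc` file, the `check-latest` flag, and the build steps are illustrative choices, not requirements of the action:

```yaml
steps:
  - uses: actions/checkout@v3
  - uses: actions/setup-node@v3
    with:
      node-version-file: '.nvmrc'          # could also be .node-version, .tool-versions, or package.json
      check-latest: true                    # resolve the newest version satisfying the spec
      cache: 'npm'
      cache-dependency-path: package-lock.json
  - run: npm ci
  - run: npm test
```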
__tests__/README.md | 10 | Normal file
@@ -0,0 +1,10 @@
Files located in data directory are used only for testing purposes.


## Here the list of files in the data directory
- `.nvmrc`, `.tools-versions` and `package.json` are used to test node-version-file logic
- `package-lock.json`, `pnpm-lock.yaml` and `yarn.lock` are used to test cache logic
- `versions-manifest.json` is used for unit testing to check downloading Node.js versions from the node-versions repository.
- `node-dist-index.json` is used for unit testing to check downloading Node.js versions from the official site. The file was constructed from https://nodejs.org/dist/index.json
- `node-rc-index.json` is used for unit testing to check downloading Node.js rc versions from the official site. The file was constructed from https://nodejs.org/download/rc/index.json
- `node-nightly-index.json` is used for unit testing to check downloading Node.js nightly builds from the official site. The file was constructed from https://nodejs.org/download/nightly/index.json
(file name not captured in this view)
@@ -1,9 +1,10 @@
import os = require('os');
import * as fs from 'fs';
import os from 'os';
import fs from 'fs';
import * as path from 'path';
import * as core from '@actions/core';
import * as io from '@actions/io';
import * as auth from '../src/authutil';
import * as cacheUtils from '../src/cache-utils';

let rcFile: string;

@@ -15,11 +16,7 @@ describe('authutil tests', () => {
  let dbgSpy: jest.SpyInstance;

  beforeAll(async () => {
    const randPath = path.join(
      Math.random()
        .toString(36)
        .substring(7)
    );
    const randPath = path.join(Math.random().toString(36).substring(7));
    console.log('::stop-commands::stoptoken'); // Disable executing of runner commands when running tests in actions
    process.env['GITHUB_ENV'] = ''; // Stub out Environment file functionality so we can verify it writes to standard out (toolkit is backwards compatible)
    const tempDir = path.join(_runnerDir, randPath, 'temp');
@@ -67,10 +64,10 @@ describe('authutil tests', () => {
  }, 100000);

  function readRcFile(rcFile: string) {
    let rc = {};
    let contents = fs.readFileSync(rcFile, {encoding: 'utf8'});
    const rc = {};
    const contents = fs.readFileSync(rcFile, {encoding: 'utf8'});
    for (const line of contents.split(os.EOL)) {
      let parts = line.split('=');
      const parts = line.split('=');
      if (parts.length == 2) {
        rc[parts[0].trim()] = parts[1].trim();
      }
@@ -82,8 +79,8 @@ describe('authutil tests', () => {
    await auth.configAuthentication('https://registry.npmjs.org/', 'false');

    expect(fs.statSync(rcFile)).toBeDefined();
    let contents = fs.readFileSync(rcFile, {encoding: 'utf8'});
    let rc = readRcFile(rcFile);
    const contents = fs.readFileSync(rcFile, {encoding: 'utf8'});
    const rc = readRcFile(rcFile);
    expect(rc['registry']).toBe('https://registry.npmjs.org/');
    expect(rc['always-auth']).toBe('false');
  });
@@ -92,7 +89,7 @@ describe('authutil tests', () => {
    await auth.configAuthentication('https://registry.npmjs.org', 'false');

    expect(fs.statSync(rcFile)).toBeDefined();
    let rc = readRcFile(rcFile);
    const rc = readRcFile(rcFile);
    expect(rc['registry']).toBe('https://registry.npmjs.org/');
    expect(rc['always-auth']).toBe('false');
  });
@@ -102,7 +99,7 @@ describe('authutil tests', () => {
    await auth.configAuthentication('https://registry.npmjs.org', 'false');

    expect(fs.statSync(rcFile)).toBeDefined();
    let rc = readRcFile(rcFile);
    const rc = readRcFile(rcFile);
    expect(rc['@myscope:registry']).toBe('https://registry.npmjs.org/');
    expect(rc['always-auth']).toBe('false');
  });
@@ -111,7 +108,7 @@ describe('authutil tests', () => {
    await auth.configAuthentication('npm.pkg.github.com', 'false');

    expect(fs.statSync(rcFile)).toBeDefined();
    let rc = readRcFile(rcFile);
    const rc = readRcFile(rcFile);
    expect(rc['@ownername:registry']).toBe('npm.pkg.github.com/');
    expect(rc['always-auth']).toBe('false');
  });
@@ -119,17 +116,98 @@ describe('authutil tests', () => {
  it('Sets up npmrc for always-auth true', async () => {
    await auth.configAuthentication('https://registry.npmjs.org/', 'true');
    expect(fs.statSync(rcFile)).toBeDefined();
    let rc = readRcFile(rcFile);
    const rc = readRcFile(rcFile);
    expect(rc['registry']).toBe('https://registry.npmjs.org/');
    expect(rc['always-auth']).toBe('true');
  });
  it('It is already set the NODE_AUTH_TOKEN export it ', async () => {

  it('is already set the NODE_AUTH_TOKEN export it', async () => {
    process.env.NODE_AUTH_TOKEN = 'foobar';
    await auth.configAuthentication('npm.pkg.github.com', 'false');
    expect(fs.statSync(rcFile)).toBeDefined();
    let rc = readRcFile(rcFile);
    const rc = readRcFile(rcFile);
    expect(rc['@ownername:registry']).toBe('npm.pkg.github.com/');
    expect(rc['always-auth']).toBe('false');
    expect(process.env.NODE_AUTH_TOKEN).toEqual('foobar');
  });

  it('configAuthentication should overwrite non-scoped with non-scoped', async () => {
    fs.writeFileSync(rcFile, 'registry=NNN');
    await auth.configAuthentication('https://registry.npmjs.org/', 'true');
    const contents = fs.readFileSync(rcFile, {encoding: 'utf8'});
    expect(contents).toBe(
      `//registry.npmjs.org/:_authToken=\${NODE_AUTH_TOKEN}${os.EOL}registry=https://registry.npmjs.org/${os.EOL}always-auth=true`
    );
  });

  it('configAuthentication should overwrite only non-scoped', async () => {
    fs.writeFileSync(rcFile, `registry=NNN${os.EOL}@myscope:registry=MMM`);
    await auth.configAuthentication('https://registry.npmjs.org/', 'true');
    const contents = fs.readFileSync(rcFile, {encoding: 'utf8'});
    expect(contents).toBe(
      `@myscope:registry=MMM${os.EOL}//registry.npmjs.org/:_authToken=\${NODE_AUTH_TOKEN}${os.EOL}registry=https://registry.npmjs.org/${os.EOL}always-auth=true`
    );
  });

  it('configAuthentication should add non-scoped to scoped', async () => {
    fs.writeFileSync(rcFile, '@myscope:registry=NNN');
    await auth.configAuthentication('https://registry.npmjs.org/', 'true');
    const contents = fs.readFileSync(rcFile, {encoding: 'utf8'});
    expect(contents).toBe(
      `@myscope:registry=NNN${os.EOL}//registry.npmjs.org/:_authToken=\${NODE_AUTH_TOKEN}${os.EOL}registry=https://registry.npmjs.org/${os.EOL}always-auth=true`
    );
  });

  it('configAuthentication should overwrite scoped with scoped', async () => {
    process.env['INPUT_SCOPE'] = 'myscope';
    fs.writeFileSync(rcFile, `@myscope:registry=NNN`);
    await auth.configAuthentication('https://registry.npmjs.org/', 'true');
    const contents = fs.readFileSync(rcFile, {encoding: 'utf8'});
    expect(contents).toBe(
      `//registry.npmjs.org/:_authToken=\${NODE_AUTH_TOKEN}${os.EOL}@myscope:registry=https://registry.npmjs.org/${os.EOL}always-auth=true`
    );
  });

  it('configAuthentication should overwrite only scoped', async () => {
    process.env['INPUT_SCOPE'] = 'myscope';
    fs.writeFileSync(rcFile, `registry=NNN${os.EOL}@myscope:registry=MMM`);
    await auth.configAuthentication('https://registry.npmjs.org/', 'true');
    const contents = fs.readFileSync(rcFile, {encoding: 'utf8'});
    expect(contents).toBe(
      `registry=NNN${os.EOL}//registry.npmjs.org/:_authToken=\${NODE_AUTH_TOKEN}${os.EOL}@myscope:registry=https://registry.npmjs.org/${os.EOL}always-auth=true`
    );
  });

  it('configAuthentication should add scoped to non-scoped', async () => {
    process.env['INPUT_SCOPE'] = 'myscope';
    fs.writeFileSync(rcFile, `registry=MMM`);
    await auth.configAuthentication('https://registry.npmjs.org/', 'true');
    const contents = fs.readFileSync(rcFile, {encoding: 'utf8'});
    expect(contents).toBe(
      `registry=MMM${os.EOL}//registry.npmjs.org/:_authToken=\${NODE_AUTH_TOKEN}${os.EOL}@myscope:registry=https://registry.npmjs.org/${os.EOL}always-auth=true`
    );
  });

  it('configAuthentication should overwrite only one scoped', async () => {
    process.env['INPUT_SCOPE'] = 'myscope';
    fs.writeFileSync(
      rcFile,
      `@otherscope:registry=NNN${os.EOL}@myscope:registry=MMM`
    );
    await auth.configAuthentication('https://registry.npmjs.org/', 'true');
    const contents = fs.readFileSync(rcFile, {encoding: 'utf8'});
    expect(contents).toBe(
      `@otherscope:registry=NNN${os.EOL}//registry.npmjs.org/:_authToken=\${NODE_AUTH_TOKEN}${os.EOL}@myscope:registry=https://registry.npmjs.org/${os.EOL}always-auth=true`
    );
  });

  it('configAuthentication should add scoped to another scoped', async () => {
    process.env['INPUT_SCOPE'] = 'myscope';
    fs.writeFileSync(rcFile, `@otherscope:registry=MMM`);
    await auth.configAuthentication('https://registry.npmjs.org/', 'true');
    const contents = fs.readFileSync(rcFile, {encoding: 'utf8'});
    expect(contents).toBe(
      `@otherscope:registry=MMM${os.EOL}//registry.npmjs.org/:_authToken=\${NODE_AUTH_TOKEN}${os.EOL}@myscope:registry=https://registry.npmjs.org/${os.EOL}always-auth=true`
    );
  });
});
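The authutil tests above assert which `.npmrc` entries the `registry-url`, `scope`, and `always-auth` inputs produce. In a workflow, the same mechanism is driven roughly like this sketch; the scope, secret name, and publish step are illustrative, and the token is only read from `NODE_AUTH_TOKEN` when the registry is actually used:

```yaml
steps:
  - uses: actions/checkout@v3
  - uses: actions/setup-node@v3
    with:
      node-version: 16
      registry-url: 'https://registry.npmjs.org'
      scope: '@myscope'          # written as @myscope:registry=... in the project .npmrc
      always-auth: true
  - run: npm publish
    env:
      NODE_AUTH_TOKEN: ${{ secrets.NPM_TOKEN }}
```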
@ -32,13 +32,13 @@ describe('cache-restore', () => {
|
||||
|
||||
function findCacheFolder(command: string) {
|
||||
switch (command) {
|
||||
case utils.supportedPackageManagers.npm.getCacheFolderCommand:
|
||||
case 'npm config get cache':
|
||||
return npmCachePath;
|
||||
case utils.supportedPackageManagers.pnpm.getCacheFolderCommand:
|
||||
case 'pnpm store path --silent':
|
||||
return pnpmCachePath;
|
||||
case utils.supportedPackageManagers.yarn1.getCacheFolderCommand:
|
||||
case 'yarn cache dir':
|
||||
return yarn1CachePath;
|
||||
case utils.supportedPackageManagers.yarn2.getCacheFolderCommand:
|
||||
case 'yarn config get cacheFolder':
|
||||
return yarn2CachePath;
|
||||
default:
|
||||
return 'package/not/found';
|
||||
@ -108,7 +108,7 @@ describe('cache-restore', () => {
|
||||
it.each([['npm7'], ['npm6'], ['pnpm6'], ['yarn1'], ['yarn2'], ['random']])(
|
||||
'Throw an error because %s is not supported',
|
||||
async packageManager => {
|
||||
await expect(restoreCache(packageManager)).rejects.toThrowError(
|
||||
await expect(restoreCache(packageManager, '')).rejects.toThrow(
|
||||
`Caching for '${packageManager}' is not supported`
|
||||
);
|
||||
}
|
||||
@ -132,7 +132,7 @@ describe('cache-restore', () => {
|
||||
}
|
||||
});
|
||||
|
||||
await restoreCache(packageManager);
|
||||
await restoreCache(packageManager, '');
|
||||
expect(hashFilesSpy).toHaveBeenCalled();
|
||||
expect(infoSpy).toHaveBeenCalledWith(
|
||||
`Cache restored from key: node-cache-${platform}-${packageManager}-${fileHash}`
|
||||
@ -163,7 +163,7 @@ describe('cache-restore', () => {
|
||||
});
|
||||
|
||||
restoreCacheSpy.mockImplementationOnce(() => undefined);
|
||||
await restoreCache(packageManager);
|
||||
await restoreCache(packageManager, '');
|
||||
expect(hashFilesSpy).toHaveBeenCalled();
|
||||
expect(infoSpy).toHaveBeenCalledWith(
|
||||
`${packageManager} cache is not found`
|
||||
|
@ -18,7 +18,7 @@ describe('run', () => {
|
||||
const commonPath = '/some/random/path';
|
||||
process.env['GITHUB_WORKSPACE'] = path.join(__dirname, 'data');
|
||||
|
||||
let inputs = {} as any;
|
||||
const inputs = {} as any;
|
||||
|
||||
let getInputSpy: jest.SpyInstance;
|
||||
let infoSpy: jest.SpyInstance;
|
||||
@ -107,18 +107,20 @@ describe('run', () => {
|
||||
describe('Validate unchanged cache is not saved', () => {
|
||||
it('should not save cache for yarn1', async () => {
|
||||
inputs['cache'] = 'yarn';
|
||||
getStateSpy.mockImplementation(() => yarnFileHash);
|
||||
getCommandOutputSpy
|
||||
.mockImplementationOnce(() => '1.2.3')
|
||||
.mockImplementationOnce(() => `${commonPath}/yarn1`);
|
||||
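// stub the state written by the restore step: primary key, matched key and the JSON list of cached paths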
getStateSpy.mockImplementation(key =>
|
||||
key === State.CachePrimaryKey || key === State.CacheMatchedKey
|
||||
? yarnFileHash
|
||||
: key === State.CachePaths
|
||||
? '["/foo/bar"]'
|
||||
: 'not expected'
|
||||
);
|
||||
|
||||
await run();
|
||||
|
||||
expect(getInputSpy).toHaveBeenCalled();
|
||||
expect(getStateSpy).toHaveBeenCalledTimes(2);
|
||||
expect(getCommandOutputSpy).toHaveBeenCalledTimes(2);
|
||||
expect(debugSpy).toHaveBeenCalledWith(`yarn path is ${commonPath}/yarn1`);
|
||||
expect(debugSpy).toHaveBeenCalledWith('Consumed yarn version is 1.2.3');
|
||||
expect(getStateSpy).toHaveBeenCalledTimes(3);
|
||||
expect(getCommandOutputSpy).toHaveBeenCalledTimes(0);
|
||||
expect(debugSpy).toHaveBeenCalledTimes(0);
|
||||
expect(infoSpy).toHaveBeenCalledWith(
|
||||
`Cache hit occurred on the primary key ${yarnFileHash}, not saving cache.`
|
||||
);
|
||||
@ -127,18 +129,20 @@ describe('run', () => {
|
||||
|
||||
it('should not save cache for yarn2', async () => {
|
||||
inputs['cache'] = 'yarn';
|
||||
getStateSpy.mockImplementation(() => yarnFileHash);
|
||||
getCommandOutputSpy
|
||||
.mockImplementationOnce(() => '2.2.3')
|
||||
.mockImplementationOnce(() => `${commonPath}/yarn2`);
|
||||
getStateSpy.mockImplementation(key =>
|
||||
key === State.CachePrimaryKey || key === State.CacheMatchedKey
|
||||
? yarnFileHash
|
||||
: key === State.CachePaths
|
||||
? '["/foo/bar"]'
|
||||
: 'not expected'
|
||||
);
|
||||
|
||||
await run();
|
||||
|
||||
expect(getInputSpy).toHaveBeenCalled();
|
||||
expect(getStateSpy).toHaveBeenCalledTimes(2);
|
||||
expect(getCommandOutputSpy).toHaveBeenCalledTimes(2);
|
||||
expect(debugSpy).toHaveBeenCalledWith(`yarn path is ${commonPath}/yarn2`);
|
||||
expect(debugSpy).toHaveBeenCalledWith('Consumed yarn version is 2.2.3');
|
||||
expect(getStateSpy).toHaveBeenCalledTimes(3);
|
||||
expect(getCommandOutputSpy).toHaveBeenCalledTimes(0);
|
||||
expect(debugSpy).toHaveBeenCalledTimes(0);
|
||||
expect(infoSpy).toHaveBeenCalledWith(
|
||||
`Cache hit occurred on the primary key ${yarnFileHash}, not saving cache.`
|
||||
);
|
||||
@ -147,35 +151,40 @@ describe('run', () => {
|
||||
|
||||
it('should not save cache for npm', async () => {
|
||||
inputs['cache'] = 'npm';
|
||||
getStateSpy.mockImplementation(() => npmFileHash);
|
||||
getStateSpy.mockImplementation(key =>
|
||||
key === State.CachePrimaryKey || key === State.CacheMatchedKey
|
||||
? yarnFileHash
|
||||
: key === State.CachePaths
|
||||
? '["/foo/bar"]'
|
||||
: 'not expected'
|
||||
);
|
||||
getCommandOutputSpy.mockImplementationOnce(() => `${commonPath}/npm`);
|
||||
|
||||
await run();
|
||||
|
||||
expect(getInputSpy).toHaveBeenCalled();
|
||||
expect(getStateSpy).toHaveBeenCalledTimes(2);
|
||||
expect(getCommandOutputSpy).toHaveBeenCalledTimes(1);
|
||||
expect(debugSpy).toHaveBeenCalledWith(`npm path is ${commonPath}/npm`);
|
||||
expect(infoSpy).toHaveBeenCalledWith(
|
||||
`Cache hit occurred on the primary key ${npmFileHash}, not saving cache.`
|
||||
);
|
||||
expect(getStateSpy).toHaveBeenCalledTimes(3);
|
||||
expect(getCommandOutputSpy).toHaveBeenCalledTimes(0);
|
||||
expect(debugSpy).toHaveBeenCalledTimes(0);
|
||||
expect(setFailedSpy).not.toHaveBeenCalled();
|
||||
});
|
||||
|
||||
it('should not save cache for pnpm', async () => {
|
||||
inputs['cache'] = 'pnpm';
|
||||
getStateSpy.mockImplementation(() => pnpmFileHash);
|
||||
getCommandOutputSpy.mockImplementationOnce(() => `${commonPath}/pnpm`);
|
||||
getStateSpy.mockImplementation(key =>
|
||||
key === State.CachePrimaryKey || key === State.CacheMatchedKey
|
||||
? yarnFileHash
|
||||
: key === State.CachePaths
|
||||
? '["/foo/bar"]'
|
||||
: 'not expected'
|
||||
);
|
||||
|
||||
await run();
|
||||
|
||||
expect(getInputSpy).toHaveBeenCalled();
|
||||
expect(getStateSpy).toHaveBeenCalledTimes(2);
|
||||
expect(getCommandOutputSpy).toHaveBeenCalledTimes(1);
|
||||
expect(debugSpy).toHaveBeenCalledWith(`pnpm path is ${commonPath}/pnpm`);
|
||||
expect(infoSpy).toHaveBeenCalledWith(
|
||||
`Cache hit occurred on the primary key ${pnpmFileHash}, not saving cache.`
|
||||
);
|
||||
expect(getStateSpy).toHaveBeenCalledTimes(3);
|
||||
expect(getCommandOutputSpy).toHaveBeenCalledTimes(0);
|
||||
expect(debugSpy).toHaveBeenCalledTimes(0);
|
||||
expect(setFailedSpy).not.toHaveBeenCalled();
|
||||
});
|
||||
});
|
||||
@ -183,24 +192,22 @@ describe('run', () => {
|
||||
describe('action saves the cache', () => {
|
||||
it('saves cache from yarn 1', async () => {
|
||||
inputs['cache'] = 'yarn';
|
||||
getStateSpy.mockImplementation((name: string) => {
|
||||
if (name === State.CacheMatchedKey) {
|
||||
return yarnFileHash;
|
||||
} else {
|
||||
return npmFileHash;
|
||||
}
|
||||
});
|
||||
getCommandOutputSpy
|
||||
.mockImplementationOnce(() => '1.2.3')
|
||||
.mockImplementationOnce(() => `${commonPath}/yarn1`);
|
||||
getStateSpy.mockImplementation((key: string) =>
|
||||
key === State.CacheMatchedKey
|
||||
? yarnFileHash
|
||||
: key === State.CachePrimaryKey
|
||||
? npmFileHash
|
||||
: key === State.CachePaths
|
||||
? '["/foo/bar"]'
|
||||
: 'not expected'
|
||||
);
|
||||
|
||||
await run();
|
||||
|
||||
expect(getInputSpy).toHaveBeenCalled();
|
||||
expect(getStateSpy).toHaveBeenCalledTimes(2);
|
||||
expect(getCommandOutputSpy).toHaveBeenCalledTimes(2);
|
||||
expect(debugSpy).toHaveBeenCalledWith(`yarn path is ${commonPath}/yarn1`);
|
||||
expect(debugSpy).toHaveBeenCalledWith('Consumed yarn version is 1.2.3');
|
||||
expect(getStateSpy).toHaveBeenCalledTimes(3);
|
||||
expect(getCommandOutputSpy).toHaveBeenCalledTimes(0);
|
||||
expect(debugSpy).toHaveBeenCalledTimes(0);
|
||||
expect(infoSpy).not.toHaveBeenCalledWith(
|
||||
`Cache hit occurred on the primary key ${yarnFileHash}, not saving cache.`
|
||||
);
|
||||
@ -213,24 +220,22 @@ describe('run', () => {
|
||||
|
||||
it('saves cache from yarn 2', async () => {
|
||||
inputs['cache'] = 'yarn';
|
||||
getStateSpy.mockImplementation((name: string) => {
|
||||
if (name === State.CacheMatchedKey) {
|
||||
return yarnFileHash;
|
||||
} else {
|
||||
return npmFileHash;
|
||||
}
|
||||
});
|
||||
getCommandOutputSpy
|
||||
.mockImplementationOnce(() => '2.2.3')
|
||||
.mockImplementationOnce(() => `${commonPath}/yarn2`);
|
||||
getStateSpy.mockImplementation((key: string) =>
|
||||
key === State.CacheMatchedKey
|
||||
? yarnFileHash
|
||||
: key === State.CachePrimaryKey
|
||||
? npmFileHash
|
||||
: key === State.CachePaths
|
||||
? '["/foo/bar"]'
|
||||
: 'not expected'
|
||||
);
|
||||
|
||||
await run();
|
||||
|
||||
expect(getInputSpy).toHaveBeenCalled();
|
||||
expect(getStateSpy).toHaveBeenCalledTimes(2);
|
||||
expect(getCommandOutputSpy).toHaveBeenCalledTimes(2);
|
||||
expect(debugSpy).toHaveBeenCalledWith(`yarn path is ${commonPath}/yarn2`);
|
||||
expect(debugSpy).toHaveBeenCalledWith('Consumed yarn version is 2.2.3');
|
||||
expect(getStateSpy).toHaveBeenCalledTimes(3);
|
||||
expect(getCommandOutputSpy).toHaveBeenCalledTimes(0);
|
||||
expect(debugSpy).toHaveBeenCalledTimes(0);
|
||||
expect(infoSpy).not.toHaveBeenCalledWith(
|
||||
`Cache hit occurred on the primary key ${yarnFileHash}, not saving cache.`
|
||||
);
|
||||
@ -243,21 +248,22 @@ describe('run', () => {
|
||||
|
||||
it('saves cache from npm', async () => {
|
||||
inputs['cache'] = 'npm';
|
||||
getStateSpy.mockImplementation((name: string) => {
|
||||
if (name === State.CacheMatchedKey) {
|
||||
return npmFileHash;
|
||||
} else {
|
||||
return yarnFileHash;
|
||||
}
|
||||
});
|
||||
getCommandOutputSpy.mockImplementationOnce(() => `${commonPath}/npm`);
|
||||
getStateSpy.mockImplementation((key: string) =>
|
||||
key === State.CacheMatchedKey
|
||||
? npmFileHash
|
||||
: key === State.CachePrimaryKey
|
||||
? yarnFileHash
|
||||
: key === State.CachePaths
|
||||
? '["/foo/bar"]'
|
||||
: 'not expected'
|
||||
);
|
||||
|
||||
await run();
|
||||
|
||||
expect(getInputSpy).toHaveBeenCalled();
|
||||
expect(getStateSpy).toHaveBeenCalledTimes(2);
|
||||
expect(getCommandOutputSpy).toHaveBeenCalledTimes(1);
|
||||
expect(debugSpy).toHaveBeenCalledWith(`npm path is ${commonPath}/npm`);
|
||||
expect(getStateSpy).toHaveBeenCalledTimes(3);
|
||||
expect(getCommandOutputSpy).toHaveBeenCalledTimes(0);
|
||||
expect(debugSpy).toHaveBeenCalledTimes(0);
|
||||
expect(infoSpy).not.toHaveBeenCalledWith(
|
||||
`Cache hit occurred on the primary key ${npmFileHash}, not saving cache.`
|
||||
);
|
||||
@ -270,21 +276,22 @@ describe('run', () => {
|
||||
|
||||
it('saves cache from pnpm', async () => {
|
||||
inputs['cache'] = 'pnpm';
|
||||
getStateSpy.mockImplementation((name: string) => {
|
||||
if (name === State.CacheMatchedKey) {
|
||||
return pnpmFileHash;
|
||||
} else {
|
||||
return npmFileHash;
|
||||
}
|
||||
});
|
||||
getCommandOutputSpy.mockImplementationOnce(() => `${commonPath}/pnpm`);
|
||||
getStateSpy.mockImplementation((key: string) =>
|
||||
key === State.CacheMatchedKey
|
||||
? pnpmFileHash
|
||||
: key === State.CachePrimaryKey
|
||||
? npmFileHash
|
||||
: key === State.CachePaths
|
||||
? '["/foo/bar"]'
|
||||
: 'not expected'
|
||||
);
|
||||
|
||||
await run();
|
||||
|
||||
expect(getInputSpy).toHaveBeenCalled();
|
||||
expect(getStateSpy).toHaveBeenCalledTimes(2);
|
||||
expect(getCommandOutputSpy).toHaveBeenCalledTimes(1);
|
||||
expect(debugSpy).toHaveBeenCalledWith(`pnpm path is ${commonPath}/pnpm`);
|
||||
expect(getStateSpy).toHaveBeenCalledTimes(3);
|
||||
expect(getCommandOutputSpy).toHaveBeenCalledTimes(0);
|
||||
expect(debugSpy).toHaveBeenCalledTimes(0);
|
||||
expect(infoSpy).not.toHaveBeenCalledWith(
|
||||
`Cache hit occurred on the primary key ${pnpmFileHash}, not saving cache.`
|
||||
);
|
||||
@ -297,14 +304,15 @@ describe('run', () => {
|
||||
|
||||
it('save with -1 cacheId, should not fail workflow', async () => {
|
||||
inputs['cache'] = 'npm';
|
||||
getStateSpy.mockImplementation((name: string) => {
|
||||
if (name === State.CacheMatchedKey) {
|
||||
return npmFileHash;
|
||||
} else {
|
||||
return yarnFileHash;
|
||||
}
|
||||
});
|
||||
getCommandOutputSpy.mockImplementationOnce(() => `${commonPath}/npm`);
|
||||
getStateSpy.mockImplementation((key: string) =>
|
||||
key === State.CacheMatchedKey
|
||||
? npmFileHash
|
||||
: key === State.CachePrimaryKey
|
||||
? yarnFileHash
|
||||
: key === State.CachePaths
|
||||
? '["/foo/bar"]'
|
||||
: 'not expected'
|
||||
);
|
||||
saveCacheSpy.mockImplementation(() => {
|
||||
return -1;
|
||||
});
|
||||
@ -312,9 +320,9 @@ describe('run', () => {
|
||||
await run();
|
||||
|
||||
expect(getInputSpy).toHaveBeenCalled();
|
||||
expect(getStateSpy).toHaveBeenCalledTimes(2);
|
||||
expect(getCommandOutputSpy).toHaveBeenCalledTimes(1);
|
||||
expect(debugSpy).toHaveBeenCalledWith(`npm path is ${commonPath}/npm`);
|
||||
expect(getStateSpy).toHaveBeenCalledTimes(3);
|
||||
expect(getCommandOutputSpy).toHaveBeenCalledTimes(0);
|
||||
expect(debugSpy).toHaveBeenCalledTimes(0);
|
||||
expect(infoSpy).not.toHaveBeenCalledWith(
|
||||
`Cache hit occurred on the primary key ${npmFileHash}, not saving cache.`
|
||||
);
|
||||
@ -327,14 +335,15 @@ describe('run', () => {
|
||||
|
||||
it('saves with error from toolkit, should fail workflow', async () => {
|
||||
inputs['cache'] = 'npm';
|
||||
getStateSpy.mockImplementation((name: string) => {
|
||||
if (name === State.CacheMatchedKey) {
|
||||
return npmFileHash;
|
||||
} else {
|
||||
return yarnFileHash;
|
||||
}
|
||||
});
|
||||
getCommandOutputSpy.mockImplementationOnce(() => `${commonPath}/npm`);
|
||||
getStateSpy.mockImplementation((key: string) =>
|
||||
key === State.CacheMatchedKey
|
||||
? npmFileHash
|
||||
: key === State.CachePrimaryKey
|
||||
? yarnFileHash
|
||||
: key === State.CachePaths
|
||||
? '["/foo/bar"]'
|
||||
: 'not expected'
|
||||
);
|
||||
saveCacheSpy.mockImplementation(() => {
|
||||
throw new cache.ValidationError('Validation failed');
|
||||
});
|
||||
@ -342,9 +351,9 @@ describe('run', () => {
|
||||
await run();
|
||||
|
||||
expect(getInputSpy).toHaveBeenCalled();
|
||||
expect(getStateSpy).toHaveBeenCalledTimes(2);
|
||||
expect(getCommandOutputSpy).toHaveBeenCalledTimes(1);
|
||||
expect(debugSpy).toHaveBeenCalledWith(`npm path is ${commonPath}/npm`);
|
||||
expect(getStateSpy).toHaveBeenCalledTimes(3);
|
||||
expect(getCommandOutputSpy).toHaveBeenCalledTimes(0);
|
||||
expect(debugSpy).toHaveBeenCalledTimes(0);
|
||||
expect(infoSpy).not.toHaveBeenCalledWith(
|
||||
`Cache hit occurred on the primary key ${npmFileHash}, not saving cache.`
|
||||
);
|
||||
|
@ -2,7 +2,18 @@ import * as core from '@actions/core';
|
||||
import * as cache from '@actions/cache';
|
||||
import path from 'path';
|
||||
import * as utils from '../src/cache-utils';
|
||||
import {PackageManagerInfo, isCacheFeatureAvailable} from '../src/cache-utils';
|
||||
import {
|
||||
PackageManagerInfo,
|
||||
isCacheFeatureAvailable,
|
||||
supportedPackageManagers,
|
||||
getCommandOutput,
|
||||
resetProjectDirectoriesMemoized
|
||||
} from '../src/cache-utils';
|
||||
import fs from 'fs';
|
||||
import * as cacheUtils from '../src/cache-utils';
|
||||
import * as glob from '@actions/glob';
|
||||
import {Globber} from '@actions/glob';
|
||||
import {MockGlobber} from './mock/glob-mock';
|
||||
|
||||
describe('cache-utils', () => {
|
||||
const versionYarn1 = '1.2.3';
|
||||
@ -12,8 +23,10 @@ describe('cache-utils', () => {
|
||||
let isFeatureAvailable: jest.SpyInstance;
|
||||
let info: jest.SpyInstance;
|
||||
let warningSpy: jest.SpyInstance;
|
||||
let fsRealPathSyncSpy: jest.SpyInstance;
|
||||
|
||||
beforeEach(() => {
|
||||
console.log('::stop-commands::stoptoken');
|
||||
process.env['GITHUB_WORKSPACE'] = path.join(__dirname, 'data');
|
||||
debugSpy = jest.spyOn(core, 'debug');
|
||||
debugSpy.mockImplementation(msg => {});
|
||||
@ -24,13 +37,29 @@ describe('cache-utils', () => {
|
||||
isFeatureAvailable = jest.spyOn(cache, 'isFeatureAvailable');
|
||||
|
||||
getCommandOutputSpy = jest.spyOn(utils, 'getCommandOutput');
|
||||
|
||||
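// treat realpathSync as identity so the fake test paths do not need to exist on disk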
fsRealPathSyncSpy = jest.spyOn(fs, 'realpathSync');
|
||||
fsRealPathSyncSpy.mockImplementation(dirName => {
|
||||
return dirName;
|
||||
});
|
||||
});
|
||||
|
||||
afterEach(() => {
|
||||
jest.resetAllMocks();
|
||||
jest.clearAllMocks();
|
||||
//jest.restoreAllMocks();
|
||||
});
|
||||
|
||||
afterAll(async () => {
|
||||
console.log('::stoptoken::');
|
||||
jest.restoreAllMocks();
|
||||
}, 100000);
|
||||
|
||||
describe('getPackageManagerInfo', () => {
|
||||
it.each<[string, PackageManagerInfo | null]>([
|
||||
['npm', utils.supportedPackageManagers.npm],
|
||||
['pnpm', utils.supportedPackageManagers.pnpm],
|
||||
['yarn', utils.supportedPackageManagers.yarn1],
|
||||
['yarn', utils.supportedPackageManagers.yarn],
|
||||
['yarn1', null],
|
||||
['yarn2', null],
|
||||
['npm7', null]
|
||||
@ -46,7 +75,8 @@ describe('cache-utils', () => {
|
||||
isFeatureAvailable.mockImplementation(() => false);
|
||||
process.env['GITHUB_SERVER_URL'] = 'https://www.test.com';
|
||||
|
||||
expect(() => isCacheFeatureAvailable()).toThrowError(
|
||||
expect(isCacheFeatureAvailable()).toBeFalsy();
|
||||
expect(warningSpy).toHaveBeenCalledWith(
|
||||
'Cache action is only supported on GHES version >= 3.5. If you are on version >=3.5 Please check with GHES admin if Actions cache service is enabled or not.'
|
||||
);
|
||||
});
|
||||
@ -71,4 +101,263 @@ describe('cache-utils', () => {
|
||||
jest.resetAllMocks();
|
||||
jest.clearAllMocks();
|
||||
});
|
||||
|
||||
describe('getCacheDirectoriesPaths', () => {
|
||||
let existsSpy: jest.SpyInstance;
|
||||
let lstatSpy: jest.SpyInstance;
|
||||
let globCreateSpy: jest.SpyInstance;
|
||||
|
||||
beforeEach(() => {
|
||||
existsSpy = jest.spyOn(fs, 'existsSync');
|
||||
existsSpy.mockImplementation(() => true);
|
||||
|
||||
lstatSpy = jest.spyOn(fs, 'lstatSync');
|
||||
lstatSpy.mockImplementation(arg => ({
|
||||
isDirectory: () => true
|
||||
}));
|
||||
|
||||
globCreateSpy = jest.spyOn(glob, 'create');
|
||||
|
||||
globCreateSpy.mockImplementation(
|
||||
(pattern: string): Promise<Globber> =>
|
||||
MockGlobber.create(['/foo', '/bar'])
|
||||
);
|
||||
|
||||
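// clear the memoized project-directories lookup so each test starts from fresh glob results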
resetProjectDirectoriesMemoized();
|
||||
});
|
||||
|
||||
afterEach(() => {
|
||||
existsSpy.mockRestore();
|
||||
lstatSpy.mockRestore();
|
||||
globCreateSpy.mockRestore();
|
||||
});
|
||||
|
||||
it.each([
|
||||
[supportedPackageManagers.npm, ''],
|
||||
[supportedPackageManagers.npm, '/dir/file.lock'],
|
||||
[supportedPackageManagers.npm, '/**/file.lock'],
|
||||
[supportedPackageManagers.pnpm, ''],
|
||||
[supportedPackageManagers.pnpm, '/dir/file.lock'],
|
||||
[supportedPackageManagers.pnpm, '/**/file.lock']
|
||||
])(
|
||||
'getCacheDirectoriesPaths should return one dir for non-yarn package managers',
|
||||
async (packageManagerInfo, cacheDependency) => {
|
||||
getCommandOutputSpy.mockImplementation(() => 'foo');
|
||||
|
||||
const dirs = await cacheUtils.getCacheDirectories(
|
||||
packageManagerInfo,
|
||||
cacheDependency
|
||||
);
|
||||
expect(dirs).toEqual(['foo']);
|
||||
// no call is made to look up a package manager version
|
||||
// a single call is made to get the cache folder
|
||||
expect(getCommandOutputSpy).toHaveBeenCalledTimes(1);
|
||||
}
|
||||
);
|
||||
|
||||
it('getCacheDirectoriesPaths should return one dir for yarn without cacheDependency', async () => {
|
||||
getCommandOutputSpy.mockImplementation(() => 'foo');
|
||||
|
||||
const dirs = await cacheUtils.getCacheDirectories(
|
||||
supportedPackageManagers.yarn,
|
||||
''
|
||||
);
|
||||
expect(dirs).toEqual(['foo']);
|
||||
});
|
||||
|
||||
it.each([
|
||||
[supportedPackageManagers.npm, ''],
|
||||
[supportedPackageManagers.npm, '/dir/file.lock'],
|
||||
[supportedPackageManagers.npm, '/**/file.lock'],
|
||||
[supportedPackageManagers.pnpm, ''],
|
||||
[supportedPackageManagers.pnpm, '/dir/file.lock'],
|
||||
[supportedPackageManagers.pnpm, '/**/file.lock'],
|
||||
[supportedPackageManagers.yarn, ''],
|
||||
[supportedPackageManagers.yarn, '/dir/file.lock'],
|
||||
[supportedPackageManagers.yarn, '/**/file.lock']
|
||||
])(
|
||||
'getCacheDirectoriesPaths should throw when getCommandOutput returns an empty string',
|
||||
async (packageManagerInfo, cacheDependency) => {
|
||||
getCommandOutputSpy.mockImplementation((command: string) =>
|
||||
// return empty string to indicate getCacheFolderPath failed
|
||||
// --version still works
|
||||
command.includes('version') ? '1.' : ''
|
||||
);
|
||||
|
||||
await expect(
|
||||
cacheUtils.getCacheDirectories(packageManagerInfo, cacheDependency)
|
||||
).rejects.toThrow(); // exact message ('Could not get cache folder path for /dir') is not asserted
|
||||
}
|
||||
);
|
||||
|
||||
it.each([
|
||||
[supportedPackageManagers.yarn, '/dir/file.lock'],
|
||||
[supportedPackageManagers.yarn, '/**/file.lock']
|
||||
])(
|
||||
'getCacheDirectoriesPaths should not throw when no directories are found',
|
||||
async (packageManagerInfo, cacheDependency) => {
|
||||
lstatSpy.mockImplementation(arg => ({
|
||||
isDirectory: () => false
|
||||
}));
|
||||
|
||||
await cacheUtils.getCacheDirectories(
|
||||
packageManagerInfo,
|
||||
cacheDependency
|
||||
);
|
||||
expect(warningSpy).toHaveBeenCalledTimes(1);
|
||||
expect(warningSpy).toHaveBeenCalledWith(
|
||||
`No existing directories found containing cache-dependency-path="${cacheDependency}"`
|
||||
);
|
||||
}
|
||||
);
|
||||
|
||||
it.each(['1.1.1', '2.2.2'])(
|
||||
'getCacheDirectoriesPaths yarn v%s should return one dir without cacheDependency',
|
||||
async version => {
|
||||
getCommandOutputSpy.mockImplementationOnce(() => version);
|
||||
getCommandOutputSpy.mockImplementationOnce(() => `foo${version}`);
|
||||
|
||||
const dirs = await cacheUtils.getCacheDirectories(
|
||||
supportedPackageManagers.yarn,
|
||||
''
|
||||
);
|
||||
expect(dirs).toEqual([`foo${version}`]);
|
||||
}
|
||||
);
|
||||
|
||||
it.each(['1.1.1', '2.2.2'])(
|
||||
'getCacheDirectoriesPaths yarn v%s should return 2 dirs with globbed cacheDependency',
|
||||
async version => {
|
||||
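// '--version' queries return the yarn version; cache-folder queries return a distinct dir per call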
let dirNo = 1;
|
||||
getCommandOutputSpy.mockImplementation((command: string) =>
|
||||
command.includes('version') ? version : `file_${version}_${dirNo++}`
|
||||
);
|
||||
globCreateSpy.mockImplementation(
|
||||
(pattern: string): Promise<Globber> =>
|
||||
MockGlobber.create(['/tmp/dir1/file', '/tmp/dir2/file'])
|
||||
);
|
||||
|
||||
const dirs = await cacheUtils.getCacheDirectories(
|
||||
supportedPackageManagers.yarn,
|
||||
'/tmp/**/file'
|
||||
);
|
||||
expect(dirs).toEqual([`file_${version}_1`, `file_${version}_2`]);
|
||||
}
|
||||
);
|
||||
|
||||
it.each(['1.1.1', '2.2.2'])(
|
||||
'getCacheDirectoriesPaths yarn v%s should return 2 dirs with globbed cacheDependency expanding to duplicates',
|
||||
async version => {
|
||||
let dirNo = 1;
|
||||
getCommandOutputSpy.mockImplementation((command: string) =>
|
||||
command.includes('version') ? version : `file_${version}_${dirNo++}`
|
||||
);
|
||||
globCreateSpy.mockImplementation(
|
||||
(pattern: string): Promise<Globber> =>
|
||||
MockGlobber.create([
|
||||
'/tmp/dir1/file',
|
||||
'/tmp/dir2/file',
|
||||
'/tmp/dir1/file'
|
||||
])
|
||||
);
|
||||
|
||||
const dirs = await cacheUtils.getCacheDirectories(
|
||||
supportedPackageManagers.yarn,
|
||||
'/tmp/**/file'
|
||||
);
|
||||
expect(dirs).toEqual([`file_${version}_1`, `file_${version}_2`]);
|
||||
}
|
||||
);
|
||||
|
||||
it.each(['1.1.1', '2.2.2'])(
|
||||
'getCacheDirectoriesPaths yarn v%s should return 2 unique dirs despite duplicate cache directories',
|
||||
async version => {
|
||||
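// dirNo++ % 2 makes the first and third cache-folder lookups return the same path, so one duplicate must be filtered out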
let dirNo = 1;
|
||||
getCommandOutputSpy.mockImplementation((command: string) =>
|
||||
command.includes('version')
|
||||
? version
|
||||
: `file_${version}_${dirNo++ % 2}`
|
||||
);
|
||||
globCreateSpy.mockImplementation(
|
||||
(pattern: string): Promise<Globber> =>
|
||||
MockGlobber.create([
|
||||
'/tmp/dir1/file',
|
||||
'/tmp/dir2/file',
|
||||
'/tmp/dir3/file'
|
||||
])
|
||||
);
|
||||
|
||||
const dirs = await cacheUtils.getCacheDirectories(
|
||||
supportedPackageManagers.yarn,
|
||||
'/tmp/**/file'
|
||||
);
|
||||
expect(dirs).toEqual([`file_${version}_1`, `file_${version}_0`]);
|
||||
expect(getCommandOutputSpy).toHaveBeenCalledTimes(6);
|
||||
expect(getCommandOutputSpy).toHaveBeenCalledWith(
|
||||
'yarn --version',
|
||||
'/tmp/dir1'
|
||||
);
|
||||
expect(getCommandOutputSpy).toHaveBeenCalledWith(
|
||||
'yarn --version',
|
||||
'/tmp/dir2'
|
||||
);
|
||||
expect(getCommandOutputSpy).toHaveBeenCalledWith(
|
||||
'yarn --version',
|
||||
'/tmp/dir3'
|
||||
);
|
||||
expect(getCommandOutputSpy).toHaveBeenCalledWith(
|
||||
version.startsWith('1.')
|
||||
? 'yarn cache dir'
|
||||
: 'yarn config get cacheFolder',
|
||||
'/tmp/dir1'
|
||||
);
|
||||
expect(getCommandOutputSpy).toHaveBeenCalledWith(
|
||||
version.startsWith('1.')
|
||||
? 'yarn cache dir'
|
||||
: 'yarn config get cacheFolder',
|
||||
'/tmp/dir2'
|
||||
);
|
||||
expect(getCommandOutputSpy).toHaveBeenCalledWith(
|
||||
version.startsWith('1.')
|
||||
? 'yarn cache dir'
|
||||
: 'yarn config get cacheFolder',
|
||||
'/tmp/dir3'
|
||||
);
|
||||
}
|
||||
);
|
||||
|
||||
it.each(['1.1.1', '2.2.2'])(
|
||||
'getCacheDirectoriesPaths yarn v%s should return 4 dirs with multiple globs',
|
||||
async version => {
|
||||
// simulate a multi-line cache-dependency-path with inconsistent indentation
|
||||
const cacheDependencyPath = `/tmp/dir1/file
|
||||
/tmp/dir2/file
|
||||
/tmp/**/file
|
||||
`;
|
||||
globCreateSpy.mockImplementation(
|
||||
(pattern: string): Promise<Globber> =>
|
||||
MockGlobber.create([
|
||||
'/tmp/dir1/file',
|
||||
'/tmp/dir2/file',
|
||||
'/tmp/dir3/file',
|
||||
'/tmp/dir4/file'
|
||||
])
|
||||
);
|
||||
let dirNo = 1;
|
||||
getCommandOutputSpy.mockImplementation((command: string) =>
|
||||
command.includes('version') ? version : `file_${version}_${dirNo++}`
|
||||
);
|
||||
const dirs = await cacheUtils.getCacheDirectories(
|
||||
supportedPackageManagers.yarn,
|
||||
cacheDependencyPath
|
||||
);
|
||||
expect(dirs).toEqual([
|
||||
`file_${version}_1`,
|
||||
`file_${version}_2`,
|
||||
`file_${version}_3`,
|
||||
`file_${version}_4`
|
||||
]);
|
||||
}
|
||||
);
|
||||
});
|
||||
});
|
||||
|
531
__tests__/canary-installer.test.ts
Normal file
@ -0,0 +1,531 @@
|
||||
import * as core from '@actions/core';
|
||||
import * as io from '@actions/io';
|
||||
import * as tc from '@actions/tool-cache';
|
||||
import * as httpm from '@actions/http-client';
|
||||
import * as exec from '@actions/exec';
|
||||
import * as cache from '@actions/cache';
|
||||
import fs from 'fs';
|
||||
import cp from 'child_process';
|
||||
import osm from 'os';
|
||||
import path from 'path';
|
||||
import * as main from '../src/main';
|
||||
import * as auth from '../src/authutil';
|
||||
import {INodeVersion} from '../src/distributions/base-models';
|
||||
|
||||
import nodeTestManifest from './data/versions-manifest.json';
|
||||
import nodeTestDist from './data/node-dist-index.json';
|
||||
import nodeTestDistNightly from './data/node-nightly-index.json';
|
||||
import nodeTestDistRc from './data/node-rc-index.json';
|
||||
import nodeV8CanaryTestDist from './data/v8-canary-dist-index.json';
|
||||
|
||||
describe('setup-node', () => {
|
||||
let inputs = {} as any;
|
||||
let os = {} as any;
|
||||
|
||||
let inSpy: jest.SpyInstance;
|
||||
let findSpy: jest.SpyInstance;
|
||||
let findAllVersionsSpy: jest.SpyInstance;
|
||||
let cnSpy: jest.SpyInstance;
|
||||
let logSpy: jest.SpyInstance;
|
||||
let warningSpy: jest.SpyInstance;
|
||||
let getManifestSpy: jest.SpyInstance;
|
||||
let getDistSpy: jest.SpyInstance;
|
||||
let platSpy: jest.SpyInstance;
|
||||
let archSpy: jest.SpyInstance;
|
||||
let dlSpy: jest.SpyInstance;
|
||||
let exSpy: jest.SpyInstance;
|
||||
let cacheSpy: jest.SpyInstance;
|
||||
let dbgSpy: jest.SpyInstance;
|
||||
let whichSpy: jest.SpyInstance;
|
||||
let existsSpy: jest.SpyInstance;
|
||||
let readFileSyncSpy: jest.SpyInstance;
|
||||
let mkdirpSpy: jest.SpyInstance;
|
||||
let execSpy: jest.SpyInstance;
|
||||
let authSpy: jest.SpyInstance;
|
||||
let parseNodeVersionSpy: jest.SpyInstance;
|
||||
let isCacheActionAvailable: jest.SpyInstance;
|
||||
let getExecOutputSpy: jest.SpyInstance;
|
||||
let getJsonSpy: jest.SpyInstance;
|
||||
|
||||
beforeEach(() => {
|
||||
// @actions/core
|
||||
console.log('::stop-commands::stoptoken'); // Disable executing of runner commands when running tests in actions
|
||||
process.env['GITHUB_PATH'] = ''; // Stub out ENV file functionality so we can verify it writes to standard out
|
||||
process.env['GITHUB_OUTPUT'] = ''; // Stub out ENV file functionality so we can verify it writes to standard out
|
||||
inputs = {};
|
||||
inSpy = jest.spyOn(core, 'getInput');
|
||||
inSpy.mockImplementation(name => inputs[name]);
|
||||
|
||||
// node
|
||||
os = {};
|
||||
platSpy = jest.spyOn(osm, 'platform');
|
||||
platSpy.mockImplementation(() => os['platform']);
|
||||
archSpy = jest.spyOn(osm, 'arch');
|
||||
archSpy.mockImplementation(() => os['arch']);
|
||||
execSpy = jest.spyOn(cp, 'execSync');
|
||||
|
||||
// @actions/tool-cache
|
||||
findSpy = jest.spyOn(tc, 'find');
|
||||
findAllVersionsSpy = jest.spyOn(tc, 'findAllVersions');
|
||||
dlSpy = jest.spyOn(tc, 'downloadTool');
|
||||
exSpy = jest.spyOn(tc, 'extractTar');
|
||||
cacheSpy = jest.spyOn(tc, 'cacheDir');
|
||||
getManifestSpy = jest.spyOn(tc, 'getManifestFromRepo');
|
||||
|
||||
// http-client
|
||||
getJsonSpy = jest.spyOn(httpm.HttpClient.prototype, 'getJson');
|
||||
|
||||
// io
|
||||
whichSpy = jest.spyOn(io, 'which');
|
||||
existsSpy = jest.spyOn(fs, 'existsSync');
|
||||
mkdirpSpy = jest.spyOn(io, 'mkdirP');
|
||||
|
||||
// @actions/tool-cache
|
||||
isCacheActionAvailable = jest.spyOn(cache, 'isFeatureAvailable');
|
||||
|
||||
// disable authentication portion for installer tests
|
||||
authSpy = jest.spyOn(auth, 'configAuthentication');
|
||||
authSpy.mockImplementation(() => {});
|
||||
|
||||
// gets
|
||||
getManifestSpy.mockImplementation(
|
||||
() => <tc.IToolRelease[]>nodeTestManifest
|
||||
);
|
||||
|
||||
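// serve the dist index fixture that matches the release channel in the requested URL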
getJsonSpy.mockImplementation(url => {
|
||||
let res: any;
|
||||
if (url.includes('/rc')) {
|
||||
res = <INodeVersion[]>nodeTestDistRc;
|
||||
} else if (url.includes('/nightly')) {
|
||||
res = <INodeVersion[]>nodeTestDistNightly;
|
||||
} else if (url.includes('/v8-canary')) {
|
||||
res = <INodeVersion[]>nodeV8CanaryTestDist;
|
||||
} else {
|
||||
res = <INodeVersion[]>nodeTestDist;
|
||||
}
|
||||
|
||||
return {result: res};
|
||||
});
|
||||
|
||||
// writes
|
||||
cnSpy = jest.spyOn(process.stdout, 'write');
|
||||
logSpy = jest.spyOn(core, 'info');
|
||||
dbgSpy = jest.spyOn(core, 'debug');
|
||||
warningSpy = jest.spyOn(core, 'warning');
|
||||
cnSpy.mockImplementation(line => {
|
||||
// uncomment to debug
|
||||
// process.stderr.write('write:' + line + '\n');
|
||||
});
|
||||
logSpy.mockImplementation(line => {
|
||||
// uncomment to debug
|
||||
// process.stderr.write('log:' + line + '\n');
|
||||
});
|
||||
dbgSpy.mockImplementation(msg => {
|
||||
// uncomment to see debug output
|
||||
// process.stderr.write(msg + '\n');
|
||||
});
|
||||
warningSpy.mockImplementation(msg => {
|
||||
// uncomment to debug
|
||||
// process.stderr.write('log:' + msg + '\n');
|
||||
});
|
||||
|
||||
// @actions/exec
|
||||
getExecOutputSpy = jest.spyOn(exec, 'getExecOutput');
|
||||
getExecOutputSpy.mockImplementation(() => 'v16.15.0');
|
||||
});
|
||||
|
||||
afterEach(() => {
|
||||
jest.resetAllMocks();
|
||||
jest.clearAllMocks();
|
||||
//jest.restoreAllMocks();
|
||||
});
|
||||
|
||||
afterAll(async () => {
|
||||
console.log('::stoptoken::'); // Re-enable executing of runner commands when running tests in actions
|
||||
jest.restoreAllMocks();
|
||||
}, 100000);
|
||||
|
||||
//--------------------------------------------------
|
||||
// Found in cache tests
|
||||
//--------------------------------------------------
|
||||
|
||||
it('finds version in cache with stable true', async () => {
|
||||
inputs['node-version'] = '20-v8-canary';
|
||||
os['arch'] = 'x64';
|
||||
inputs.stable = 'true';
|
||||
|
||||
const toolPath = path.normalize(
|
||||
'/cache/node/20.0.0-v8-canary20221103f7e2421e91/x64'
|
||||
);
|
||||
findSpy.mockImplementation(() => toolPath);
|
||||
findAllVersionsSpy.mockImplementation(() => [
|
||||
'20.0.0-v8-canary20221103f7e2421e91',
|
||||
'20.0.0-v8-canary20221030fefe1c0879',
|
||||
'19.0.0-v8-canary202210172ec229fc56',
|
||||
'20.0.0-v8-canary2022102310ff1e5a8d'
|
||||
]);
|
||||
await main.run();
|
||||
|
||||
expect(findSpy).toHaveBeenCalledWith(
|
||||
'node',
|
||||
'20.0.0-v8-canary20221103f7e2421e91',
|
||||
'x64'
|
||||
);
|
||||
expect(logSpy).toHaveBeenCalledWith(`Found in cache @ ${toolPath}`);
|
||||
});
|
||||
|
||||
it('finds version in cache and adds it to the path', async () => {
|
||||
inputs['node-version'] = '20-v8-canary';
|
||||
os['arch'] = 'x64';
|
||||
|
||||
inSpy.mockImplementation(name => inputs[name]);
|
||||
|
||||
const toolPath = path.normalize(
|
||||
'/cache/node/20.0.0-v8-canary20221103f7e2421e91/x64'
|
||||
);
|
||||
findSpy.mockImplementation(() => toolPath);
|
||||
findAllVersionsSpy.mockImplementation(() => [
|
||||
'20.0.0-v8-canary20221103f7e2421e91',
|
||||
'20.0.0-v8-canary20221030fefe1c0879',
|
||||
'19.0.0-v8-canary202210172ec229fc56',
|
||||
'20.0.0-v8-canary2022102310ff1e5a8d'
|
||||
]);
|
||||
await main.run();
|
||||
|
||||
const expPath = path.join(toolPath, 'bin');
|
||||
expect(cnSpy).toHaveBeenCalledWith(`::add-path::${expPath}${osm.EOL}`);
|
||||
});
|
||||
|
||||
it('handles unhandled find error and reports error', async () => {
|
||||
os.platform = 'linux';
|
||||
const errMsg = 'unhandled error message';
|
||||
inputs['node-version'] = '20.0.0-v8-canary20221103f7e2421e91';
|
||||
|
||||
findSpy.mockImplementation(() => {
|
||||
throw new Error(errMsg);
|
||||
});
|
||||
findAllVersionsSpy.mockImplementation(() => [
|
||||
'20.0.0-v8-canary20221103f7e2421e91',
|
||||
'20.0.0-v8-canary20221030fefe1c0879',
|
||||
'19.0.0-v8-canary202210172ec229fc56',
|
||||
'20.0.0-v8-canary2022102310ff1e5a8d'
|
||||
]);
|
||||
|
||||
await main.run();
|
||||
|
||||
expect(cnSpy).toHaveBeenCalledWith('::error::' + errMsg + osm.EOL);
|
||||
});
|
||||
|
||||
//--------------------------------------------------
|
||||
// Manifest tests
|
||||
//--------------------------------------------------
|
||||
it('falls back to a version from node dist', async () => {
|
||||
os.platform = 'linux';
|
||||
os.arch = 'x64';
|
||||
|
||||
// a version which is not in the manifest but is in node dist
|
||||
const versionSpec = '11.15.0';
|
||||
|
||||
inputs['node-version'] = versionSpec;
|
||||
inputs['always-auth'] = false;
|
||||
inputs['token'] = 'faketoken';
|
||||
|
||||
// ... but not in the local cache
|
||||
findSpy.mockImplementation(() => '');
|
||||
|
||||
dlSpy.mockImplementation(async () => '/some/temp/path');
|
||||
const toolPath = path.normalize('/cache/node/11.11.0/x64');
|
||||
exSpy.mockImplementation(async () => '/some/other/temp/path');
|
||||
cacheSpy.mockImplementation(async () => toolPath);
|
||||
|
||||
await main.run();
|
||||
|
||||
const expPath = path.join(toolPath, 'bin');
|
||||
|
||||
expect(dlSpy).toHaveBeenCalled();
|
||||
expect(exSpy).toHaveBeenCalled();
|
||||
expect(logSpy).toHaveBeenCalledWith(
|
||||
'Not found in manifest. Falling back to download directly from Node'
|
||||
);
|
||||
expect(logSpy).toHaveBeenCalledWith(
|
||||
`Attempting to download ${versionSpec}...`
|
||||
);
|
||||
expect(cnSpy).toHaveBeenCalledWith(`::add-path::${expPath}${osm.EOL}`);
|
||||
});
|
||||
|
||||
it('does not find a version that does not exist', async () => {
|
||||
os.platform = 'linux';
|
||||
os.arch = 'x64';
|
||||
|
||||
const versionSpec = '23.0.0-v8-canary20221103f7e2421e91';
|
||||
inputs['node-version'] = versionSpec;
|
||||
|
||||
findSpy.mockImplementation(() => '');
|
||||
findAllVersionsSpy.mockImplementation(() => [
|
||||
'20.0.0-v8-canary20221103f7e2421e91',
|
||||
'20.0.0-v8-canary20221030fefe1c0879',
|
||||
'19.0.0-v8-canary202210172ec229fc56',
|
||||
'20.0.0-v8-canary2022102310ff1e5a8d'
|
||||
]);
|
||||
await main.run();
|
||||
|
||||
expect(cnSpy).toHaveBeenCalledWith(
|
||||
`::error::Unable to find Node version '${versionSpec}' for platform ${os.platform} and architecture ${os.arch}.${osm.EOL}`
|
||||
);
|
||||
});
|
||||
|
||||
it('reports a failed download', async () => {
|
||||
const errMsg = 'unhandled download message';
|
||||
os.platform = 'linux';
|
||||
os.arch = 'x64';
|
||||
|
||||
// a version which is in the manifest
|
||||
const versionSpec = '19.0.0-v8-canary';
|
||||
|
||||
inputs['node-version'] = versionSpec;
|
||||
inputs['always-auth'] = false;
|
||||
inputs['token'] = 'faketoken';
|
||||
|
||||
findSpy.mockImplementation(() => '');
|
||||
findAllVersionsSpy.mockImplementation(() => [
|
||||
'20.0.0-v8-canary20221103f7e2421e91',
|
||||
'20.0.0-v8-canary20221030fefe1c0879',
|
||||
'20.0.0-v8-canary2022102310ff1e5a8d'
|
||||
]);
|
||||
dlSpy.mockImplementation(() => {
|
||||
throw new Error(errMsg);
|
||||
});
|
||||
await main.run();
|
||||
|
||||
expect(cnSpy).toHaveBeenCalledWith(`::error::${errMsg}${osm.EOL}`);
|
||||
});
|
||||
|
||||
it('acquires specified architecture of node', async () => {
|
||||
for (const {arch, version, osSpec} of [
|
||||
{
|
||||
arch: 'x86',
|
||||
version: '20.0.0-v8-canary20221022e83bcb6c41',
|
||||
osSpec: 'win32'
|
||||
},
|
||||
{
|
||||
arch: 'x86',
|
||||
version: '20.0.0-v8-canary20221103f7e2421e91',
|
||||
osSpec: 'win32'
|
||||
}
|
||||
]) {
|
||||
os.platform = osSpec;
|
||||
os.arch = arch;
|
||||
const fileExtension = os.platform === 'win32' ? '7z' : 'tar.gz';
|
||||
const platform = {
|
||||
linux: 'linux',
|
||||
darwin: 'darwin',
|
||||
win32: 'win'
|
||||
}[os.platform];
|
||||
|
||||
inputs['node-version'] = version;
|
||||
inputs['architecture'] = arch;
|
||||
inputs['always-auth'] = false;
|
||||
inputs['token'] = 'faketoken';
|
||||
|
||||
const expectedUrl = `https://nodejs.org/download/v8-canary/v${version}/node-v${version}-${platform}-${arch}.${fileExtension}`;
|
||||
|
||||
// ... but not in the local cache
|
||||
findSpy.mockImplementation(() => '');
|
||||
findAllVersionsSpy.mockImplementation(() => []);
|
||||
|
||||
dlSpy.mockImplementation(async () => '/some/temp/path');
|
||||
const toolPath = path.normalize(`/cache/node/${version}/${arch}`);
|
||||
exSpy.mockImplementation(async () => '/some/other/temp/path');
|
||||
cacheSpy.mockImplementation(async () => toolPath);
|
||||
|
||||
await main.run();
|
||||
expect(dlSpy).toHaveBeenCalled();
|
||||
expect(logSpy).toHaveBeenCalledWith(
|
||||
`Acquiring ${version} - ${arch} from ${expectedUrl}`
|
||||
);
|
||||
}
|
||||
}, 100000);
|
||||
|
||||
describe('nightly versions', () => {
|
||||
it.each([
|
||||
[
|
||||
'20.0.0-v8-canary',
|
||||
'20.0.0-v8-canary20221103f7e2421e91',
|
||||
'https://nodejs.org/download/v8-canary/v20.0.0-v8-canary20221103f7e2421e91/node-v20.0.0-v8-canary20221103f7e2421e91-linux-x64.tar.gz'
|
||||
],
|
||||
[
|
||||
'20-v8-canary',
|
||||
'20.0.0-v8-canary20221103f7e2421e91',
|
||||
'https://nodejs.org/download/v8-canary/v20.0.0-v8-canary20221103f7e2421e91/node-v20.0.0-v8-canary20221103f7e2421e91-linux-x64.tar.gz'
|
||||
],
|
||||
[
|
||||
'19.0.0-v8-canary',
|
||||
'19.0.0-v8-canary202210187d6960f23f',
|
||||
'https://nodejs.org/download/v8-canary/v19.0.0-v8-canary202210187d6960f23f/node-v19.0.0-v8-canary202210187d6960f23f-linux-x64.tar.gz'
|
||||
],
|
||||
[
|
||||
'19-v8-canary',
|
||||
'19.0.0-v8-canary202210187d6960f23f',
|
||||
'https://nodejs.org/download/v8-canary/v19.0.0-v8-canary202210187d6960f23f/node-v19.0.0-v8-canary202210187d6960f23f-linux-x64.tar.gz'
|
||||
],
|
||||
[
|
||||
'19.0.0-v8-canary202210187d6960f23f',
|
||||
'19.0.0-v8-canary202210187d6960f23f',
|
||||
'https://nodejs.org/download/v8-canary/v19.0.0-v8-canary202210187d6960f23f/node-v19.0.0-v8-canary202210187d6960f23f-linux-x64.tar.gz'
|
||||
]
|
||||
])(
|
||||
'finds the version in the index.json and installs it',
|
||||
async (input, expectedVersion, expectedUrl) => {
|
||||
const toolPath = path.normalize(`/cache/node/${expectedVersion}/x64`);
|
||||
|
||||
findSpy.mockImplementation(() => '');
|
||||
findAllVersionsSpy.mockImplementation(() => []);
|
||||
dlSpy.mockImplementation(async () => '/some/temp/path');
|
||||
exSpy.mockImplementation(async () => '/some/other/temp/path');
|
||||
cacheSpy.mockImplementation(async () => toolPath);
|
||||
|
||||
inputs['node-version'] = input;
|
||||
os['arch'] = 'x64';
|
||||
os['platform'] = 'linux';
|
||||
// act
|
||||
await main.run();
|
||||
|
||||
// assert
|
||||
expect(logSpy).toHaveBeenCalledWith(
|
||||
`Acquiring ${expectedVersion} - ${os.arch} from ${expectedUrl}`
|
||||
);
|
||||
expect(logSpy).toHaveBeenCalledWith('Extracting ...');
|
||||
expect(logSpy).toHaveBeenCalledWith('Adding to the cache ...');
|
||||
expect(cnSpy).toHaveBeenCalledWith(
|
||||
`::add-path::${path.join(toolPath, 'bin')}${osm.EOL}`
|
||||
);
|
||||
}
|
||||
);
|
||||
|
||||
it.each([
|
||||
[
|
||||
'20.0.0-v8-canary20221103f7e2421e91',
|
||||
'20.0.0-v8-canary20221103f7e2421e91'
|
||||
],
|
||||
['20.0.0-v8-canary', '20.0.0-v8-canary20221103f7e2421e91'],
|
||||
['20-v8-canary', '20.0.0-v8-canary20221103f7e2421e91']
|
||||
])(
|
||||
'finds the %s version in the hostedToolcache',
|
||||
async (input, expectedVersion) => {
|
||||
const toolPath = path.normalize(`/cache/node/${expectedVersion}/x64`);
|
||||
findSpy.mockReturnValue(toolPath);
|
||||
findAllVersionsSpy.mockReturnValue([
|
||||
'20.0.0-v8-canary20221103f7e2421e91',
|
||||
'20.0.0-v8-canary20221030fefe1c0879',
|
||||
'19.0.0-v8-canary202210172ec229fc56',
|
||||
'20.0.0-v8-canary2022102310ff1e5a8d'
|
||||
]);
|
||||
|
||||
inputs['node-version'] = input;
|
||||
os['arch'] = 'x64';
|
||||
os['platform'] = 'linux';
|
||||
|
||||
// act
|
||||
await main.run();
|
||||
|
||||
// assert
|
||||
expect(findAllVersionsSpy).toHaveBeenCalled();
|
||||
expect(logSpy).toHaveBeenCalledWith(`Found in cache @ ${toolPath}`);
|
||||
expect(cnSpy).toHaveBeenCalledWith(
|
||||
`::add-path::${path.join(toolPath, 'bin')}${osm.EOL}`
|
||||
);
|
||||
}
|
||||
);
|
||||
|
||||
it.each([
|
||||
[
|
||||
'20.0.0-v8-canary',
|
||||
'20.0.0-v8-canary20221103f7e2421e91',
|
||||
'20.0.0-v8-canary20221030fefe1c0879',
|
||||
'https://nodejs.org/download/v8-canary/v20.0.0-v8-canary20221103f7e2421e91/node-v20.0.0-v8-canary20221103f7e2421e91-linux-x64.tar.gz'
|
||||
],
|
||||
[
|
||||
'20-v8-canary',
|
||||
'20.0.0-v8-canary20221103f7e2421e91',
|
||||
'20.0.0-v8-canary20221030fefe1c0879',
|
||||
'https://nodejs.org/download/v8-canary/v20.0.0-v8-canary20221103f7e2421e91/node-v20.0.0-v8-canary20221103f7e2421e91-linux-x64.tar.gz'
|
||||
],
|
||||
[
|
||||
'19.0.0-v8-canary',
|
||||
'19.0.0-v8-canary202210187d6960f23f',
|
||||
'19.0.0-v8-canary202210172ec229fc56',
|
||||
'https://nodejs.org/download/v8-canary/v19.0.0-v8-canary202210187d6960f23f/node-v19.0.0-v8-canary202210187d6960f23f-linux-x64.tar.gz'
|
||||
],
|
||||
[
|
||||
'19-v8-canary',
|
||||
'19.0.0-v8-canary202210187d6960f23f',
|
||||
'19.0.0-v8-canary202210172ec229fc56',
|
||||
'https://nodejs.org/download/v8-canary/v19.0.0-v8-canary202210187d6960f23f/node-v19.0.0-v8-canary202210187d6960f23f-linux-x64.tar.gz'
|
||||
]
|
||||
])(
|
||||
'get %s version from dist if check-latest is true',
|
||||
async (input, expectedVersion, foundVersion, expectedUrl) => {
|
||||
const foundToolPath = path.normalize(`/cache/node/${foundVersion}/x64`);
|
||||
const toolPath = path.normalize(`/cache/node/${expectedVersion}/x64`);
|
||||
|
||||
inputs['node-version'] = input;
|
||||
inputs['check-latest'] = 'true';
|
||||
os['arch'] = 'x64';
|
||||
os['platform'] = 'linux';
|
||||
|
||||
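// the tool cache mock omits the latest canary, so check-latest resolves it from dist and downloads it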
findSpy.mockReturnValue(foundToolPath);
|
||||
findAllVersionsSpy.mockReturnValue([
|
||||
'20.0.0-v8-canary20221030fefe1c0879',
|
||||
'19.0.0-v8-canary202210172ec229fc56',
|
||||
'20.0.0-v8-canary2022102310ff1e5a8d'
|
||||
]);
|
||||
dlSpy.mockImplementation(async () => '/some/temp/path');
|
||||
exSpy.mockImplementation(async () => '/some/other/temp/path');
|
||||
cacheSpy.mockImplementation(async () => toolPath);
|
||||
|
||||
// act
|
||||
await main.run();
|
||||
|
||||
// assert
|
||||
expect(findAllVersionsSpy).toHaveBeenCalled();
|
||||
expect(logSpy).toHaveBeenCalledWith(
|
||||
`Acquiring ${expectedVersion} - ${os.arch} from ${expectedUrl}`
|
||||
);
|
||||
expect(logSpy).toHaveBeenCalledWith('Extracting ...');
|
||||
expect(logSpy).toHaveBeenCalledWith('Adding to the cache ...');
|
||||
expect(cnSpy).toHaveBeenCalledWith(
|
||||
`::add-path::${path.join(toolPath, 'bin')}${osm.EOL}`
|
||||
);
|
||||
}
|
||||
);
|
||||
});
|
||||
|
||||
describe('setup-node v8 canary tests', () => {
|
||||
it('v8 canary setup node flow with cached', async () => {
|
||||
const versionSpec = 'v20-v8-canary';
|
||||
|
||||
inputs['node-version'] = versionSpec;
|
||||
inputs['always-auth'] = false;
|
||||
inputs['token'] = 'faketoken';
|
||||
|
||||
os.platform = 'linux';
|
||||
os.arch = 'x64';
|
||||
|
||||
const versionExpected = 'v20.0.0-v8-canary20221103f7e2421e91';
|
||||
findAllVersionsSpy.mockImplementation(() => [versionExpected]);
|
||||
|
||||
const toolPath = path.normalize(`/cache/node/${versionExpected}/x64`);
|
||||
findSpy.mockImplementation(version => toolPath);
|
||||
|
||||
await main.run();
|
||||
|
||||
expect(cnSpy).toHaveBeenCalledWith(
|
||||
`::add-path::${toolPath}${path.sep}bin${osm.EOL}`
|
||||
);
|
||||
|
||||
expect(dlSpy).not.toHaveBeenCalled();
|
||||
expect(exSpy).not.toHaveBeenCalled();
|
||||
expect(cacheSpy).not.toHaveBeenCalled();
|
||||
});
|
||||
});
|
||||
});
|
1
__tests__/data/.tool-versions-node
Normal file
@ -0,0 +1 @@
|
||||
node 14.0.0
|
35
__tests__/data/node-nightly-index.json
Normal file
@ -0,0 +1,35 @@
|
||||
[
|
||||
{"version":"v20.0.0-nightly2022101987cdf7d412","date":"2022-10-19","files":["aix-ppc64","headers","linux-arm64","linux-armv7l","linux-ppc64le","linux-s390x","linux-x64","osx-arm64-tar","osx-x64-pkg","osx-x64-tar","src","win-x64-7z","win-x64-exe","win-x64-msi","win-x64-zip","win-x86-7z","win-x86-exe","win-x86-msi","win-x86-zip"],"npm":"8.19.2","v8":"10.7.193.16","uv":"1.43.0","zlib":"1.2.11","openssl":"3.0.5+quic","modules":"111","lts":false,"security":false},
|
||||
{"version":"v19.0.0-nightly202210182672219b78","date":"2022-10-18","files":["aix-ppc64","headers","linux-arm64","linux-armv7l","linux-ppc64le","linux-s390x","linux-x64","osx-arm64-tar","osx-x64-pkg","osx-x64-tar","src","win-x64-7z","win-x64-exe","win-x64-msi","win-x64-zip","win-x86-7z","win-x86-exe","win-x86-msi","win-x86-zip"],"npm":"8.19.2","v8":"10.7.193.13","uv":"1.43.0","zlib":"1.2.11","openssl":"3.0.5+quic","modules":"111","lts":false,"security":false},
|
||||
|
||||
|
||||
{"version":"v19.0.0-nightly202204201fe5d56403","date":"2022-04-20","files":["aix-ppc64","headers","linux-arm64","linux-armv7l","linux-ppc64le","linux-s390x","linux-x64","osx-arm64-tar","osx-x64-tar","src","win-x64-7z","win-x64-exe","win-x64-msi","win-x64-zip"],"npm":"8.7.0","v8":"10.1.124.8","uv":"1.43.0","zlib":"1.2.11","openssl":"3.0.2+quic","modules":"108","lts":false,"security":false},
|
||||
{"version":"v18.0.0-nightly20220419bde889bd4e","date":"2022-04-19","files":["aix-ppc64","headers","linux-arm64","linux-armv7l","linux-ppc64le","linux-s390x","linux-x64","osx-arm64-tar","osx-x64-pkg","osx-x64-tar","src","win-x64-7z","win-x64-exe","win-x64-msi","win-x64-zip"],"npm":"8.7.0","v8":"10.1.124.8","uv":"1.43.0","zlib":"1.2.11","openssl":"3.0.2+quic","modules":"108","lts":false,"security":false},
|
||||
{"version":"v18.0.0-nightly202204180699150267","date":"2022-04-18","files":["aix-ppc64","headers","linux-arm64","linux-armv7l","linux-ppc64le","linux-s390x","linux-x64","osx-arm64-tar","osx-x64-pkg","osx-x64-tar","src","win-x64-7z","win-x64-exe","win-x64-msi","win-x64-zip"],"npm":"8.7.0","v8":"10.1.124.8","uv":"1.43.0","zlib":"1.2.11","openssl":"3.0.2+quic","modules":"108","lts":false,"security":false},
|
||||
|
||||
{"version":"v18.0.0-nightly202110204cb3e06ed8","date":"2021-10-20","files":["aix-ppc64","headers","linux-arm64","linux-armv7l","linux-ppc64le","linux-s390x","linux-x64","osx-arm64-tar","osx-x64-pkg","osx-x64-tar","src","win-x64-7z","win-x64-exe","win-x64-msi","win-x64-zip","win-x86-7z","win-x86-exe","win-x86-msi","win-x86-zip"],"npm":"8.1.0","v8":"9.5.172.21","uv":"1.42.0","zlib":"1.2.11","openssl":"3.0.0+quic","modules":"102","lts":false,"security":false},
|
||||
{"version":"v17.5.0-nightly20220209e43808936a","date":"2022-02-09","files":["aix-ppc64","headers","linux-arm64","linux-armv7l","linux-ppc64le","linux-s390x","linux-x64","osx-arm64-tar","osx-x64-pkg","osx-x64-tar","src","win-x64-7z","win-x64-exe","win-x64-msi","win-x64-zip","win-x86-7z","win-x86-exe","win-x86-msi","win-x86-zip"],"npm":"8.4.1","v8":"9.6.180.15","uv":"1.43.0","zlib":"1.2.11","openssl":"3.0.1+quic","modules":"102","lts":false,"security":false},
|
||||
{"version":"v17.0.0-nightly202110193f11666dc7","date":"2021-10-19","files":["aix-ppc64","headers","linux-arm64","linux-armv7l","linux-ppc64le","linux-s390x","linux-x64","osx-arm64-tar","osx-x64-pkg","osx-x64-tar","src","win-x64-7z","win-x64-exe","win-x64-msi","win-x64-zip","win-x86-7z","win-x86-exe","win-x86-msi","win-x86-zip"],"npm":"8.1.0","v8":"9.5.172.21","uv":"1.42.0","zlib":"1.2.11","openssl":"3.0.0+quic","modules":"102","lts":false,"security":false},
|
||||
{"version":"v17.0.0-nightly20211018c0a70203de","date":"2021-10-18","files":["aix-ppc64","headers","linux-arm64","linux-armv7l","linux-ppc64le","linux-s390x","linux-x64","osx-arm64-tar","osx-x64-pkg","osx-x64-tar","src","win-x64-7z","win-x64-exe","win-x64-msi","win-x64-zip","win-x86-7z","win-x86-exe","win-x86-msi","win-x86-zip"],"npm":"8.0.0","v8":"9.5.172.21","uv":"1.42.0","zlib":"1.2.11","openssl":"3.0.0+quic","modules":"102","lts":false,"security":false},
|
||||
|
||||
{"version":"v16.0.0-nightly20210420a0261d231c","date":"2021-04-20","files":["aix-ppc64","headers","linux-arm64","linux-armv7l","linux-ppc64le","linux-s390x","linux-x64","osx-arm64-tar","osx-x64-pkg","osx-x64-tar","src","win-x64-7z","win-x64-exe","win-x64-msi","win-x64-zip","win-x86-7z","win-x86-exe","win-x86-msi","win-x86-zip"],"npm":"7.10.0","v8":"9.0.257.17","uv":"1.41.0","zlib":"1.2.11","openssl":"1.1.1k+quic","modules":"93","lts":false,"security":false},
|
||||
{"version":"v16.0.0-nightly20210417bc31dc0e0f","date":"2021-04-17","files":["aix-ppc64","headers","linux-arm64","linux-ppc64le","linux-s390x","linux-x64","osx-arm64-tar","osx-x64-pkg","osx-x64-tar","src","win-x64-7z","win-x64-exe","win-x64-msi","win-x64-zip","win-x86-7z","win-x86-exe","win-x86-msi","win-x86-zip"],"npm":"7.10.0","v8":"9.0.257.17","uv":"1.41.0","zlib":"1.2.11","openssl":"1.1.1k+quic","modules":"93","lts":false,"security":false},
|
||||
{"version":"v16.0.0-nightly20210416d3162da8dd","date":"2021-04-16","files":["aix-ppc64","headers","linux-arm64","linux-ppc64le","linux-s390x","linux-x64","osx-arm64-tar","osx-x64-pkg","osx-x64-tar","src","win-x64-7z","win-x64-exe","win-x64-msi","win-x64-zip","win-x86-7z","win-x86-exe","win-x86-msi","win-x86-zip"],"npm":"7.9.0","v8":"9.0.257.17","uv":"1.41.0","zlib":"1.2.11","openssl":"1.1.1k+quic","modules":"93","lts":false,"security":false},
|
||||
{"version":"v16.0.0-nightly20210415c3a5e15ebe","date":"2021-04-15","files":["aix-ppc64","headers","linux-arm64","linux-ppc64le","linux-s390x","linux-x64","osx-arm64-tar","osx-x64-pkg","osx-x64-tar","src","win-x64-7z","win-x64-exe","win-x64-msi","win-x64-zip","win-x86-7z","win-x86-exe","win-x86-msi","win-x86-zip"],"npm":"7.9.0","v8":"9.0.257.17","uv":"1.41.0","zlib":"1.2.11","openssl":"1.1.1k+quic","modules":"93","lts":false,"security":false},
|
||||
|
||||
|
||||
{"version":"v15.0.0-nightly2020102011f1ad939f","date":"2020-10-20","files":["aix-ppc64","headers","linux-arm64","linux-armv7l","linux-ppc64le","linux-s390x","linux-x64","osx-x64-pkg","osx-x64-tar","src","win-x64-7z","win-x64-exe","win-x64-msi","win-x64-zip","win-x86-7z","win-x86-exe","win-x86-msi","win-x86-zip"],"npm":"7.0.2","v8":"8.6.395.16","uv":"1.40.0","zlib":"1.2.11","openssl":"1.1.1g","modules":"88","lts":false,"security":false},
|
||||
{"version":"v15.0.0-nightly20201019c55f661551","date":"2020-10-19","files":["aix-ppc64","headers","linux-arm64","linux-armv7l","linux-ppc64le","linux-s390x","linux-x64","osx-x64-pkg","osx-x64-tar","src","win-x64-7z","win-x64-exe","win-x64-msi","win-x64-zip","win-x86-7z","win-x86-exe","win-x86-msi","win-x86-zip"],"npm":"7.0.2","v8":"8.6.395.16","uv":"1.40.0","zlib":"1.2.11","openssl":"1.1.1g","modules":"88","lts":false,"security":false},
|
||||
{"version":"v14.0.0-nightly20200421c3554307c6","date":"2020-04-21","files":["aix-ppc64","headers","linux-arm64","linux-armv7l","linux-ppc64le","linux-s390x","linux-x64","osx-x64-pkg","osx-x64-tar","src","win-x64-7z","win-x64-exe","win-x64-msi","win-x64-zip","win-x86-7z","win-x86-exe","win-x86-msi","win-x86-zip"],"npm":"6.14.4","v8":"8.1.307.30","uv":"1.37.0","zlib":"1.2.11","openssl":"1.1.1f","modules":"83","lts":false,"security":false},
|
||||
{"version":"v14.0.0-nightly202004204af0598134","date":"2020-04-20","files":["aix-ppc64","headers","linux-arm64","linux-armv7l","linux-ppc64le","linux-s390x","linux-x64","osx-x64-pkg","osx-x64-tar","src","win-x64-7z","win-x64-exe","win-x64-msi","win-x64-zip","win-x86-7z","win-x86-exe","win-x86-msi","win-x86-zip"],"npm":"6.14.4","v8":"8.1.307.26","uv":"1.37.0","zlib":"1.2.11","openssl":"1.1.1f","modules":"83","lts":false,"security":false},
|
||||
|
||||
{"version":"v13.13.1-nightly20200415947ddec091","date":"2020-04-15","files":["aix-ppc64","headers","linux-arm64","linux-armv7l","linux-ppc64le","linux-s390x","linux-x64","osx-x64-pkg","osx-x64-tar","src","sunos-x64","win-x64-7z","win-x64-exe","win-x64-msi","win-x64-zip","win-x86-7z","win-x86-exe","win-x86-msi","win-x86-zip"],"npm":"6.14.4","v8":"7.9.317.25","uv":"1.35.0","zlib":"1.2.11","openssl":"1.1.1f","modules":"79","lts":false,"security":false},
|
||||
{"version":"v13.11.1-nightly2020032628e298f219","date":"2020-03-26","files":["aix-ppc64","headers","linux-arm64","linux-armv7l","linux-ppc64le","linux-s390x","linux-x64","osx-x64-pkg","osx-x64-tar","src","sunos-x64","win-x64-7z","win-x64-exe","win-x64-msi","win-x64-zip","win-x86-7z","win-x86-exe","win-x86-msi","win-x86-zip"],"npm":"6.14.3","v8":"7.9.317.25","uv":"1.35.0","zlib":"1.2.11","openssl":"1.1.1e","modules":"79","lts":false,"security":false},
|
||||
|
||||
{"version":"v13.10.2-nightly202003056122620832","date":"2020-03-05","files":["aix-ppc64","headers","linux-arm64","linux-armv7l","linux-ppc64le","linux-s390x","linux-x64","osx-x64-pkg","osx-x64-tar","src","sunos-x64","win-x64-7z","win-x64-exe","win-x64-msi","win-x64-zip","win-x86-7z","win-x86-exe","win-x86-msi","win-x86-zip"],"npm":"6.13.7","v8":"7.9.317.25","uv":"1.34.2","zlib":"1.2.11","openssl":"1.1.1d","modules":"79","lts":false,"security":false},
|
||||
{"version":"v13.9.1-nightly202003041bca7b6c70","date":"2020-03-04","files":["aix-ppc64","headers","linux-arm64","linux-armv7l","linux-ppc64le","linux-s390x","linux-x64","osx-x64-pkg","osx-x64-tar","src","sunos-x64","win-x64-7z","win-x64-exe","win-x64-msi","win-x64-zip","win-x86-7z","win-x86-exe","win-x86-msi","win-x86-zip"],"npm":"6.13.7","v8":"7.9.317.25","uv":"1.34.2","zlib":"1.2.11","openssl":"1.1.1d","modules":"79","lts":false,"security":false},
|
||||
{"version":"v13.0.0-nightly201908175e3b4d6ed9","date":"2019-08-17","files":["aix-ppc64","headers","linux-arm64","linux-armv7l","linux-ppc64le","linux-s390x","linux-x64","osx-x64-pkg","osx-x64-tar","src","sunos-x64","win-x64-7z","win-x64-exe","win-x64-msi","win-x64-zip","win-x86-7z","win-x86-exe","win-x86-msi","win-x86-zip"],"npm":"6.10.2","v8":"7.6.303.28","uv":"1.31.0","zlib":"1.2.11","openssl":"1.1.1c","modules":"77","lts":false,"security":true},
|
||||
{"version":"v13.0.0-nightly2019081671b5ce5885","date":"2019-08-16","files":["aix-ppc64","headers","linux-arm64","linux-armv7l","linux-ppc64le","linux-s390x","linux-x64","osx-x64-pkg","osx-x64-tar","src","sunos-x64","win-x64-7z","win-x64-exe","win-x64-msi","win-x64-zip","win-x86-7z","win-x86-exe","win-x86-msi","win-x86-zip"],"npm":"6.10.2","v8":"7.6.303.28","uv":"1.31.0","zlib":"1.2.11","openssl":"1.1.1c","modules":"77","lts":false,"security":true},
|
||||
{"version":"v13.0.0-nightly2019072962a809fa54","date":"2019-07-29","files":["aix-ppc64","headers","linux-arm64","linux-armv7l","linux-ppc64le","linux-s390x","linux-x64","osx-x64-pkg","osx-x64-tar","src","sunos-x64","win-x64-7z","win-x64-exe","win-x64-msi","win-x64-zip","win-x86-7z","win-x86-exe","win-x86-msi","win-x86-zip"],"npm":"6.10.0","v8":"7.5.288.22","uv":"1.30.1","zlib":"1.2.11","openssl":"1.1.1c","modules":"74","lts":false,"security":false}
]
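Each entry in these dist-index fixtures is a flat JSON object carrying the release version, its date, the published file variants, and toolchain metadata. As a rough illustration only (not the action's actual resolver), a spec such as '16-nightly' could be matched against an index shaped like this along the following lines; the helper name and types are invented for the example:

// Hypothetical sketch: pick the newest nightly build of a major line from an
// index shaped like the fixture above. Not the logic used by setup-node itself.
interface INodeVersionEntry {
  version: string; // e.g. 'v16.0.0-nightly20210417bc31dc0e0f'
  date: string; // e.g. '2021-04-17'
}

function latestNightlyFor(
  major: number,
  index: INodeVersionEntry[]
): string | undefined {
  const candidates = index.filter(
    e => e.version.startsWith(`v${major}.`) && e.version.includes('-nightly')
  );
  // Entries carry an ISO date, so the newest build is the maximum by date.
  candidates.sort((a, b) => b.date.localeCompare(a.date));
  return candidates[0]?.version;
}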
28  __tests__/data/node-rc-index.json  Normal file
@ -0,0 +1,28 @@
[
{"version":"v19.0.0-rc.2","date":"2022-10-14","files":["aix-ppc64","headers","linux-arm64","linux-armv7l","linux-ppc64le","linux-s390x","linux-x64","osx-arm64-tar","osx-x64-pkg","osx-x64-tar","src","win-x64-7z","win-x64-exe","win-x64-msi","win-x64-zip","win-x86-7z","win-x86-exe","win-x86-msi","win-x86-zip"],"v8":"","uv":"","zlib":null,"openssl":null,"modules":null,"lts":false,"security":false},
{"version":"v19.0.0-rc.1","date":"2022-10-04","files":["aix-ppc64","headers","linux-arm64","linux-armv7l","linux-ppc64le","linux-s390x","linux-x64","osx-arm64-tar","osx-x64-pkg","osx-x64-tar","src","win-x64-7z","win-x64-exe","win-x64-msi","win-x64-zip","win-x86-7z","win-x86-exe","win-x86-msi","win-x86-zip"],"v8":"","uv":"","zlib":null,"openssl":null,"modules":null,"lts":false,"security":false},
{"version":"v17.0.0-rc.1","date":"2021-10-05","files":["aix-ppc64","headers","linux-arm64","linux-armv7l","linux-ppc64le","linux-s390x","linux-x64","osx-arm64-tar","osx-x64-pkg","osx-x64-tar","src","win-x64-7z","win-x64-exe","win-x64-msi","win-x64-zip","win-x86-7z","win-x86-exe","win-x86-msi","win-x86-zip"],"v8":"","uv":"","zlib":null,"openssl":null,"modules":null,"lts":false,"security":false},
{"version":"v17.0.0-rc.0","date":"2021-09-21","files":["aix-ppc64","headers","linux-arm64","linux-armv7l","linux-ppc64le","linux-s390x","linux-x64","osx-arm64-tar","osx-x64-pkg","osx-x64-tar","src","win-x64-7z","win-x64-exe","win-x64-msi","win-x64-zip","win-x86-7z","win-x86-exe","win-x86-msi","win-x86-zip"],"v8":"","uv":"","zlib":null,"openssl":null,"modules":null,"lts":false,"security":false},
{"version":"v16.17.0-rc.1","date":"2022-08-06","files":["aix-ppc64","headers","linux-arm64","linux-armv7l","linux-ppc64le","linux-s390x","linux-x64","osx-arm64-tar","osx-x64-pkg","osx-x64-tar","src","win-x64-7z","win-x64-exe","win-x64-msi","win-x64-zip","win-x86-7z","win-x86-exe","win-x86-msi","win-x86-zip"],"v8":"","uv":"","zlib":null,"openssl":null,"modules":null,"lts":false,"security":false},
{"version":"v16.0.0-rc.2","date":"2021-04-07","files":["headers","linux-arm64","linux-ppc64le","linux-x64","osx-x64-pkg","osx-x64-tar","src"],"v8":"","uv":"","zlib":null,"openssl":null,"modules":null,"lts":false,"security":false},
{"version":"v16.0.0-rc.1","date":"2021-03-30","files":["aix-ppc64","headers","linux-arm64","linux-armv7l","linux-ppc64le","linux-s390x","linux-x64","osx-x64-pkg","osx-x64-tar","src","win-x64-7z","win-x64-exe","win-x64-msi","win-x64-zip","win-x86-7z","win-x86-exe","win-x86-msi","win-x86-zip"],"v8":"","uv":"","zlib":null,"openssl":null,"modules":null,"lts":false,"security":false},
{"version":"v16.0.0-rc.0","date":"2021-03-19","files":["aix-ppc64","headers","linux-arm64","linux-armv7l","linux-ppc64le","linux-s390x","linux-x64","osx-x64-pkg","osx-x64-tar","src","win-x64-7z","win-x64-exe","win-x64-msi","win-x64-zip","win-x86-7z","win-x86-exe","win-x86-msi","win-x86-zip"],"v8":"","uv":"","zlib":null,"openssl":null,"modules":null,"lts":false,"security":false},
{"version":"v14.19.0-rc.0","date":"2022-01-25","files":["aix-ppc64","headers","linux-arm64","linux-armv7l","linux-ppc64le","linux-s390x","linux-x64","osx-x64-pkg","osx-x64-tar","src","win-x64-7z","win-x64-exe","win-x64-msi","win-x64-zip","win-x86-7z","win-x86-exe","win-x86-msi","win-x86-zip"],"v8":"","uv":"","zlib":null,"openssl":null,"modules":null,"lts":false,"security":false},
{"version":"v14.18.0-rc.0","date":"2021-09-08","files":["aix-ppc64","headers","linux-arm64","linux-armv7l","linux-ppc64le","linux-s390x","linux-x64","osx-x64-pkg","osx-x64-tar","src","win-x64-7z","win-x64-exe","win-x64-msi","win-x64-zip","win-x86-7z","win-x86-exe","win-x86-msi","win-x86-zip"],"v8":"","uv":"","zlib":null,"openssl":null,"modules":null,"lts":false,"security":false},
{"version":"v14.17.4-rc.0","date":"2021-07-20","files":["aix-ppc64","headers","linux-arm64","linux-armv7l","linux-ppc64le","linux-s390x","linux-x64","osx-x64-pkg","osx-x64-tar","src","win-x64-7z","win-x64-exe","win-x64-msi","win-x64-zip","win-x86-7z","win-x86-exe","win-x86-msi","win-x86-zip"],"v8":"","uv":"","zlib":null,"openssl":null,"modules":null,"lts":false,"security":false},
{"version":"v14.17.1-rc.0","date":"2021-06-11","files":["aix-ppc64","headers","linux-arm64","linux-armv7l","linux-ppc64le","linux-s390x","linux-x64","osx-x64-pkg","osx-x64-tar","src","win-x64-7z","win-x64-exe","win-x64-msi","win-x64-zip","win-x86-7z","win-x86-exe","win-x86-msi","win-x86-zip"],"v8":"","uv":"","zlib":null,"openssl":null,"modules":null,"lts":false,"security":false},
{"version":"v14.16.0-rc.0","date":"2021-02-22","files":["aix-ppc64","headers","linux-arm64","linux-armv7l","linux-ppc64le","linux-s390x","linux-x64","osx-x64-pkg","osx-x64-tar","src","win-x64-7z","win-x64-exe","win-x64-msi","win-x64-zip","win-x86-7z","win-x86-exe","win-x86-msi","win-x86-zip"],"v8":"","uv":"","zlib":null,"openssl":null,"modules":null,"lts":false,"security":false},
{"version":"v14.15.5-rc.1","date":"2021-02-08","files":["aix-ppc64","headers","linux-arm64","linux-armv7l","linux-ppc64le","linux-s390x","linux-x64","osx-x64-pkg","osx-x64-tar","src","win-x64-7z","win-x64-exe","win-x64-msi","win-x64-zip","win-x86-7z","win-x86-exe","win-x86-msi","win-x86-zip"],"v8":"","uv":"","zlib":null,"openssl":null,"modules":null,"lts":false,"security":false},
{"version":"v14.15.5-rc.0","date":"2021-01-27","files":["headers","linux-arm64","linux-armv7l","linux-ppc64le","linux-s390x","linux-x64","osx-x64-pkg","osx-x64-tar","src","win-x64-7z","win-x64-exe","win-x64-msi","win-x64-zip","win-x86-7z","win-x86-exe","win-x86-msi","win-x86-zip"],"v8":"","uv":"","zlib":null,"openssl":null,"modules":null,"lts":false,"security":false},
{"version":"v14.15.2-rc.0","date":"2020-12-14","files":["headers","linux-arm64","linux-armv7l","linux-ppc64le","linux-s390x","linux-x64","osx-x64-tar","src","win-x64-7z","win-x64-exe","win-x64-msi","win-x64-zip","win-x86-7z","win-x86-exe","win-x86-msi","win-x86-zip"],"v8":"","uv":"","zlib":null,"openssl":null,"modules":null,"lts":false,"security":false},
{"version":"v14.7.0-rc.1","date":"2020-07-29","files":["aix-ppc64","headers","linux-arm64","linux-armv7l","linux-ppc64le","linux-s390x","linux-x64","osx-x64-pkg","osx-x64-tar","src","win-x64-7z","win-x64-exe","win-x64-msi","win-x64-zip","win-x86-7z","win-x86-exe","win-x86-msi","win-x86-zip"],"v8":"","uv":"","zlib":null,"openssl":null,"modules":null,"lts":false,"security":false},
{"version":"v13.11.0-rc.1","date":"2020-03-11","files":["headers","linux-arm64","linux-armv7l","linux-ppc64le","linux-x64","osx-x64-pkg","osx-x64-tar","src","sunos-x64","win-x64-7z","win-x64-exe","win-x64-msi","win-x64-zip","win-x86-7z","win-x86-exe","win-x86-msi","win-x86-zip"],"v8":"","uv":"","zlib":null,"openssl":null,"modules":null,"lts":false,"security":false},
{"version":"v13.11.0-rc.0","date":"2020-03-10","files":["headers","linux-arm64","linux-armv7l","linux-ppc64le","linux-x64","osx-x64-pkg","src","sunos-x64","win-x64-7z","win-x64-exe","win-x64-msi","win-x64-zip"],"v8":"","uv":"","zlib":null,"openssl":null,"modules":null,"lts":false,"security":false},
{"version":"v13.10.1-rc.0","date":"2020-03-04","files":["aix-ppc64","headers","linux-arm64","linux-armv7l","linux-ppc64le","linux-s390x","linux-x64","osx-x64-pkg","osx-x64-tar","src","sunos-x64","win-x64-7z","win-x64-exe","win-x64-msi","win-x64-zip"],"v8":"","uv":"","zlib":null,"openssl":null,"modules":null,"lts":false,"security":false},
{"version":"v13.4.0-rc.0","date":"2019-12-13","files":["aix-ppc64","headers","linux-arm64","linux-armv7l","linux-ppc64le","linux-s390x","linux-x64","osx-x64-pkg","osx-x64-tar","src","sunos-x64","win-x64-7z","win-x64-exe","win-x64-msi","win-x64-zip","win-x86-7z","win-x86-exe","win-x86-msi","win-x86-zip"],"v8":"","uv":"","zlib":null,"openssl":null,"modules":null,"lts":false,"security":false},
{"version":"v13.0.1-rc.0","date":"2019-10-23","files":["aix-ppc64","headers","linux-arm64","linux-armv7l","linux-ppc64le","linux-s390x","linux-x64","osx-x64-pkg","osx-x64-tar","src","sunos-x64","win-x64-7z","win-x64-exe","win-x64-msi","win-x64-zip","win-x86-7z","win-x86-exe","win-x86-msi","win-x86-zip"],"v8":"","uv":"","zlib":null,"openssl":null,"modules":null,"lts":false,"security":false},
{"version":"v13.0.0-rc.3","date":"2019-10-21","files":["aix-ppc64","headers","linux-arm64","linux-armv7l","linux-ppc64le","linux-s390x","linux-x64","osx-x64-pkg","osx-x64-tar","src","sunos-x64","win-x64-7z","win-x64-exe","win-x64-msi","win-x64-zip","win-x86-7z","win-x86-exe","win-x86-msi","win-x86-zip"],"v8":"","uv":"","zlib":null,"openssl":null,"modules":null,"lts":false,"security":false},
{"version":"v13.0.0-rc.2","date":"2019-10-15","files":["headers","linux-arm64","linux-armv7l","linux-ppc64le","linux-s390x","linux-x64","osx-x64-pkg","osx-x64-tar","src","sunos-x64","win-x64-7z","win-x64-exe","win-x64-msi","win-x64-zip","win-x86-7z","win-x86-exe","win-x86-msi","win-x86-zip"],"v8":"","uv":"","zlib":null,"openssl":null,"modules":null,"lts":false,"security":false},
{"version":"v13.0.0-rc.1","date":"2019-10-01","files":["aix-ppc64","headers","linux-arm64","linux-armv7l","linux-ppc64le","linux-s390x","linux-x64","osx-x64-pkg","osx-x64-tar","src","sunos-x64","win-x64-7z","win-x64-exe","win-x64-msi","win-x64-zip","win-x86-7z","win-x86-exe","win-x86-msi","win-x86-zip"],"v8":"","uv":"","zlib":null,"openssl":null,"modules":null,"lts":false,"security":false},
{"version":"v13.0.0-rc.0","date":"2019-09-25","files":["aix-ppc64","headers","linux-arm64","linux-armv7l","linux-ppc64le","linux-s390x","linux-x64","osx-x64-pkg","osx-x64-tar","src","sunos-x64","win-x64-7z","win-x64-exe","win-x64-msi","win-x64-zip","win-x86-7z","win-x86-exe","win-x86-msi","win-x86-zip"],"v8":"","uv":"","zlib":null,"openssl":null,"modules":null,"lts":false,"security":false}
]
161  __tests__/data/pnpm-lock.yaml  generated
@ -7,9 +7,11 @@ dependencies:
|
||||
express: 4.17.1
|
||||
|
||||
packages:
|
||||
|
||||
/accepts/1.3.7:
|
||||
resolution: {integrity: sha512-Il80Qs2WjYlJIBNzNkK6KYqlVMTbZLXgHx2oT0pU/fjRHyEp+PEfEPY0R3WCwAGVOtauxh1hOxNgIf5bv7dQpA==}
|
||||
resolution:
|
||||
{
|
||||
integrity: sha512-Il80Qs2WjYlJIBNzNkK6KYqlVMTbZLXgHx2oT0pU/fjRHyEp+PEfEPY0R3WCwAGVOtauxh1hOxNgIf5bv7dQpA==
|
||||
}
|
||||
engines: {node: '>= 0.6'}
|
||||
dependencies:
|
||||
mime-types: 2.1.31
|
||||
@ -21,7 +23,10 @@ packages:
|
||||
dev: false
|
||||
|
||||
/body-parser/1.19.0:
|
||||
resolution: {integrity: sha512-dhEPs72UPbDnAQJ9ZKMNTP6ptJaionhP5cBb541nXPlW60Jepo9RV/a4fX4XWW9CuFNK22krhrj1+rgzifNCsw==}
|
||||
resolution:
|
||||
{
|
||||
integrity: sha512-dhEPs72UPbDnAQJ9ZKMNTP6ptJaionhP5cBb541nXPlW60Jepo9RV/a4fX4XWW9CuFNK22krhrj1+rgzifNCsw==
|
||||
}
|
||||
engines: {node: '>= 0.8'}
|
||||
dependencies:
|
||||
bytes: 3.1.0
|
||||
@ -37,19 +42,28 @@ packages:
|
||||
dev: false
|
||||
|
||||
/bytes/3.1.0:
|
||||
resolution: {integrity: sha512-zauLjrfCG+xvoyaqLoV8bLVXXNGC4JqlxFCutSDWA6fJrTo2ZuvLYTqZ7aHBLZSMOopbzwv8f+wZcVzfVTI2Dg==}
|
||||
resolution:
|
||||
{
|
||||
integrity: sha512-zauLjrfCG+xvoyaqLoV8bLVXXNGC4JqlxFCutSDWA6fJrTo2ZuvLYTqZ7aHBLZSMOopbzwv8f+wZcVzfVTI2Dg==
|
||||
}
|
||||
engines: {node: '>= 0.8'}
|
||||
dev: false
|
||||
|
||||
/content-disposition/0.5.3:
|
||||
resolution: {integrity: sha512-ExO0774ikEObIAEV9kDo50o+79VCUdEB6n6lzKgGwupcVeRlhrj3qGAfwq8G6uBJjkqLrhT0qEYFcWng8z1z0g==}
|
||||
resolution:
|
||||
{
|
||||
integrity: sha512-ExO0774ikEObIAEV9kDo50o+79VCUdEB6n6lzKgGwupcVeRlhrj3qGAfwq8G6uBJjkqLrhT0qEYFcWng8z1z0g==
|
||||
}
|
||||
engines: {node: '>= 0.6'}
|
||||
dependencies:
|
||||
safe-buffer: 5.1.2
|
||||
dev: false
|
||||
|
||||
/content-type/1.0.4:
|
||||
resolution: {integrity: sha512-hIP3EEPs8tB9AT1L+NUqtwOAps4mk2Zob89MWXMHjHWg9milF/j4osnnQLXBCBFBk/tvIG/tUc9mOUJiPBhPXA==}
|
||||
resolution:
|
||||
{
|
||||
integrity: sha512-hIP3EEPs8tB9AT1L+NUqtwOAps4mk2Zob89MWXMHjHWg9milF/j4osnnQLXBCBFBk/tvIG/tUc9mOUJiPBhPXA==
|
||||
}
|
||||
engines: {node: '>= 0.6'}
|
||||
dev: false
|
||||
|
||||
@ -58,12 +72,18 @@ packages:
|
||||
dev: false
|
||||
|
||||
/cookie/0.4.0:
|
||||
resolution: {integrity: sha512-+Hp8fLp57wnUSt0tY0tHEXh4voZRDnoIrZPqlo3DPiI4y9lwg/jqx+1Om94/W6ZaPDOUbnjOt/99w66zk+l1Xg==}
|
||||
resolution:
|
||||
{
|
||||
integrity: sha512-+Hp8fLp57wnUSt0tY0tHEXh4voZRDnoIrZPqlo3DPiI4y9lwg/jqx+1Om94/W6ZaPDOUbnjOt/99w66zk+l1Xg==
|
||||
}
|
||||
engines: {node: '>= 0.6'}
|
||||
dev: false
|
||||
|
||||
/debug/2.6.9:
|
||||
resolution: {integrity: sha512-bC7ElrdJaJnPbAP+1EotYvqZsb3ecl5wi6Bfi6BJTUcNowp6cvspg0jXznRTKDjm/E7AdgFBVeAPVMNcKGsHMA==}
|
||||
resolution:
|
||||
{
|
||||
integrity: sha512-bC7ElrdJaJnPbAP+1EotYvqZsb3ecl5wi6Bfi6BJTUcNowp6cvspg0jXznRTKDjm/E7AdgFBVeAPVMNcKGsHMA==
|
||||
}
|
||||
dependencies:
|
||||
ms: 2.0.0
|
||||
dev: false
|
||||
@ -96,7 +116,10 @@ packages:
|
||||
dev: false
|
||||
|
||||
/express/4.17.1:
|
||||
resolution: {integrity: sha512-mHJ9O79RqluphRrcw2X/GTh3k9tVv8YcoyY4Kkh4WDMUYKRZUq0h1o0w2rrrxBqM7VoeUVqgb27xlEMXTnYt4g==}
|
||||
resolution:
|
||||
{
|
||||
integrity: sha512-mHJ9O79RqluphRrcw2X/GTh3k9tVv8YcoyY4Kkh4WDMUYKRZUq0h1o0w2rrrxBqM7VoeUVqgb27xlEMXTnYt4g==
|
||||
}
|
||||
engines: {node: '>= 0.10.0'}
|
||||
dependencies:
|
||||
accepts: 1.3.7
|
||||
@ -132,7 +155,10 @@ packages:
|
||||
dev: false
|
||||
|
||||
/finalhandler/1.1.2:
|
||||
resolution: {integrity: sha512-aAWcW57uxVNrQZqFXjITpW3sIUQmHGG3qSb9mUah9MgMC4NeWhNOlNjXEYq3HjRAvL6arUviZGGJsBg6z0zsWA==}
|
||||
resolution:
|
||||
{
|
||||
integrity: sha512-aAWcW57uxVNrQZqFXjITpW3sIUQmHGG3qSb9mUah9MgMC4NeWhNOlNjXEYq3HjRAvL6arUviZGGJsBg6z0zsWA==
|
||||
}
|
||||
engines: {node: '>= 0.8'}
|
||||
dependencies:
|
||||
debug: 2.6.9
|
||||
@ -145,7 +171,10 @@ packages:
|
||||
dev: false
|
||||
|
||||
/forwarded/0.2.0:
|
||||
resolution: {integrity: sha512-buRG0fpBtRHSTCOASe6hD258tEubFoRLb4ZNA6NxMVHNw2gOcwHo9wyablzMzOA5z9xA9L1KNjk/Nt6MT9aYow==}
|
||||
resolution:
|
||||
{
|
||||
integrity: sha512-buRG0fpBtRHSTCOASe6hD258tEubFoRLb4ZNA6NxMVHNw2gOcwHo9wyablzMzOA5z9xA9L1KNjk/Nt6MT9aYow==
|
||||
}
|
||||
engines: {node: '>= 0.6'}
|
||||
dev: false
|
||||
|
||||
@ -155,7 +184,10 @@ packages:
|
||||
dev: false
|
||||
|
||||
/http-errors/1.7.2:
|
||||
resolution: {integrity: sha512-uUQBt3H/cSIVfch6i1EuPNy/YsRSOUBXTVfZ+yR7Zjez3qjBz6i9+i4zjNaoqcoFVI4lQJ5plg63TvGfRSDCRg==}
|
||||
resolution:
|
||||
{
|
||||
integrity: sha512-uUQBt3H/cSIVfch6i1EuPNy/YsRSOUBXTVfZ+yR7Zjez3qjBz6i9+i4zjNaoqcoFVI4lQJ5plg63TvGfRSDCRg==
|
||||
}
|
||||
engines: {node: '>= 0.6'}
|
||||
dependencies:
|
||||
depd: 1.1.2
|
||||
@ -166,7 +198,10 @@ packages:
|
||||
dev: false
|
||||
|
||||
/http-errors/1.7.3:
|
||||
resolution: {integrity: sha512-ZTTX0MWrsQ2ZAhA1cejAwDLycFsd7I7nVtnkT3Ol0aqodaKW+0CTZDQ1uBv5whptCnc8e8HeRRJxRs0kmm/Qfw==}
|
||||
resolution:
|
||||
{
|
||||
integrity: sha512-ZTTX0MWrsQ2ZAhA1cejAwDLycFsd7I7nVtnkT3Ol0aqodaKW+0CTZDQ1uBv5whptCnc8e8HeRRJxRs0kmm/Qfw==
|
||||
}
|
||||
engines: {node: '>= 0.6'}
|
||||
dependencies:
|
||||
depd: 1.1.2
|
||||
@ -177,7 +212,10 @@ packages:
|
||||
dev: false
|
||||
|
||||
/iconv-lite/0.4.24:
|
||||
resolution: {integrity: sha512-v3MXnZAcvnywkTUEZomIActle7RXXeedOR31wwl7VlyoXO4Qi9arvSenNQWne1TcRwhCL1HwLI21bEqdpj8/rA==}
|
||||
resolution:
|
||||
{
|
||||
integrity: sha512-v3MXnZAcvnywkTUEZomIActle7RXXeedOR31wwl7VlyoXO4Qi9arvSenNQWne1TcRwhCL1HwLI21bEqdpj8/rA==
|
||||
}
|
||||
engines: {node: '>=0.10.0'}
|
||||
dependencies:
|
||||
safer-buffer: 2.1.2
|
||||
@ -188,11 +226,17 @@ packages:
|
||||
dev: false
|
||||
|
||||
/inherits/2.0.4:
|
||||
resolution: {integrity: sha512-k/vGaX4/Yla3WzyMCvTQOXYeIHvqOKtnqBduzTHpzpQZzAskKMhZ2K+EnBiSM9zGSoIFeMpXKxa4dYeZIQqewQ==}
|
||||
resolution:
|
||||
{
|
||||
integrity: sha512-k/vGaX4/Yla3WzyMCvTQOXYeIHvqOKtnqBduzTHpzpQZzAskKMhZ2K+EnBiSM9zGSoIFeMpXKxa4dYeZIQqewQ==
|
||||
}
|
||||
dev: false
|
||||
|
||||
/ipaddr.js/1.9.1:
|
||||
resolution: {integrity: sha512-0KI/607xoxSToH7GjN1FfSbLoU0+btTicjsQSWQlh/hZykN8KpmMf7uYwPW3R+akZ6R/w18ZlXSHBYXiYUPO3g==}
|
||||
resolution:
|
||||
{
|
||||
integrity: sha512-0KI/607xoxSToH7GjN1FfSbLoU0+btTicjsQSWQlh/hZykN8KpmMf7uYwPW3R+akZ6R/w18ZlXSHBYXiYUPO3g==
|
||||
}
|
||||
engines: {node: '>= 0.10'}
|
||||
dev: false
|
||||
|
||||
@ -211,19 +255,28 @@ packages:
|
||||
dev: false
|
||||
|
||||
/mime-db/1.48.0:
|
||||
resolution: {integrity: sha512-FM3QwxV+TnZYQ2aRqhlKBMHxk10lTbMt3bBkMAp54ddrNeVSfcQYOOKuGuy3Ddrm38I04If834fOUSq1yzslJQ==}
|
||||
resolution:
|
||||
{
|
||||
integrity: sha512-FM3QwxV+TnZYQ2aRqhlKBMHxk10lTbMt3bBkMAp54ddrNeVSfcQYOOKuGuy3Ddrm38I04If834fOUSq1yzslJQ==
|
||||
}
|
||||
engines: {node: '>= 0.6'}
|
||||
dev: false
|
||||
|
||||
/mime-types/2.1.31:
|
||||
resolution: {integrity: sha512-XGZnNzm3QvgKxa8dpzyhFTHmpP3l5YNusmne07VUOXxou9CqUqYa/HBy124RqtVh/O2pECas/MOcsDgpilPOPg==}
|
||||
resolution:
|
||||
{
|
||||
integrity: sha512-XGZnNzm3QvgKxa8dpzyhFTHmpP3l5YNusmne07VUOXxou9CqUqYa/HBy124RqtVh/O2pECas/MOcsDgpilPOPg==
|
||||
}
|
||||
engines: {node: '>= 0.6'}
|
||||
dependencies:
|
||||
mime-db: 1.48.0
|
||||
dev: false
|
||||
|
||||
/mime/1.6.0:
|
||||
resolution: {integrity: sha512-x0Vn8spI+wuJ1O6S7gnbaQg8Pxh4NNHb7KSINmEWKiPE4RKOplvijn+NkmYmmRgP68mc70j2EbeTFRsrswaQeg==}
|
||||
resolution:
|
||||
{
|
||||
integrity: sha512-x0Vn8spI+wuJ1O6S7gnbaQg8Pxh4NNHb7KSINmEWKiPE4RKOplvijn+NkmYmmRgP68mc70j2EbeTFRsrswaQeg==
|
||||
}
|
||||
engines: {node: '>=4'}
|
||||
hasBin: true
|
||||
dev: false
|
||||
@ -233,11 +286,17 @@ packages:
|
||||
dev: false
|
||||
|
||||
/ms/2.1.1:
|
||||
resolution: {integrity: sha512-tgp+dl5cGk28utYktBsrFqA7HKgrhgPsg6Z/EfhWI4gl1Hwq8B/GmY/0oXZ6nF8hDVesS/FpnYaD/kOWhYQvyg==}
|
||||
resolution:
|
||||
{
|
||||
integrity: sha512-tgp+dl5cGk28utYktBsrFqA7HKgrhgPsg6Z/EfhWI4gl1Hwq8B/GmY/0oXZ6nF8hDVesS/FpnYaD/kOWhYQvyg==
|
||||
}
|
||||
dev: false
|
||||
|
||||
/negotiator/0.6.2:
|
||||
resolution: {integrity: sha512-hZXc7K2e+PgeI1eDBe/10Ard4ekbfrrqG8Ep+8Jmf4JID2bNg7NvCPOZN+kfF574pFQI7mum2AUqDidoKqcTOw==}
|
||||
resolution:
|
||||
{
|
||||
integrity: sha512-hZXc7K2e+PgeI1eDBe/10Ard4ekbfrrqG8Ep+8Jmf4JID2bNg7NvCPOZN+kfF574pFQI7mum2AUqDidoKqcTOw==
|
||||
}
|
||||
engines: {node: '>= 0.6'}
|
||||
dev: false
|
||||
|
||||
@ -249,7 +308,10 @@ packages:
|
||||
dev: false
|
||||
|
||||
/parseurl/1.3.3:
|
||||
resolution: {integrity: sha512-CiyeOxFT/JZyN5m0z9PfXw4SCBJ6Sygz1Dpl0wqjlhDEGGBP1GnsUVEL0p63hoG1fcj3fHynXi9NYO4nWOL+qQ==}
|
||||
resolution:
|
||||
{
|
||||
integrity: sha512-CiyeOxFT/JZyN5m0z9PfXw4SCBJ6Sygz1Dpl0wqjlhDEGGBP1GnsUVEL0p63hoG1fcj3fHynXi9NYO4nWOL+qQ==
|
||||
}
|
||||
engines: {node: '>= 0.8'}
|
||||
dev: false
|
||||
|
||||
@ -258,7 +320,10 @@ packages:
|
||||
dev: false
|
||||
|
||||
/proxy-addr/2.0.7:
|
||||
resolution: {integrity: sha512-llQsMLSUDUPT44jdrU/O37qlnifitDP+ZwrmmZcoSKyLKvtZxpyV0n2/bD/N4tBAAZ/gJEdZU7KMraoK1+XYAg==}
|
||||
resolution:
|
||||
{
|
||||
integrity: sha512-llQsMLSUDUPT44jdrU/O37qlnifitDP+ZwrmmZcoSKyLKvtZxpyV0n2/bD/N4tBAAZ/gJEdZU7KMraoK1+XYAg==
|
||||
}
|
||||
engines: {node: '>= 0.10'}
|
||||
dependencies:
|
||||
forwarded: 0.2.0
|
||||
@ -266,17 +331,26 @@ packages:
|
||||
dev: false
|
||||
|
||||
/qs/6.7.0:
|
||||
resolution: {integrity: sha512-VCdBRNFTX1fyE7Nb6FYoURo/SPe62QCaAyzJvUjwRaIsc+NePBEniHlvxFmmX56+HZphIGtV0XeCirBtpDrTyQ==}
|
||||
resolution:
|
||||
{
|
||||
integrity: sha512-VCdBRNFTX1fyE7Nb6FYoURo/SPe62QCaAyzJvUjwRaIsc+NePBEniHlvxFmmX56+HZphIGtV0XeCirBtpDrTyQ==
|
||||
}
|
||||
engines: {node: '>=0.6'}
|
||||
dev: false
|
||||
|
||||
/range-parser/1.2.1:
|
||||
resolution: {integrity: sha512-Hrgsx+orqoygnmhFbKaHE6c296J+HTAQXoxEF6gNupROmmGJRoyzfG3ccAveqCBrwr/2yxQ5BVd/GTl5agOwSg==}
|
||||
resolution:
|
||||
{
|
||||
integrity: sha512-Hrgsx+orqoygnmhFbKaHE6c296J+HTAQXoxEF6gNupROmmGJRoyzfG3ccAveqCBrwr/2yxQ5BVd/GTl5agOwSg==
|
||||
}
|
||||
engines: {node: '>= 0.6'}
|
||||
dev: false
|
||||
|
||||
/raw-body/2.4.0:
|
||||
resolution: {integrity: sha512-4Oz8DUIwdvoa5qMJelxipzi/iJIi40O5cGV1wNYp5hvZP8ZN0T+jiNkL0QepXs+EsQ9XJ8ipEDoiH70ySUJP3Q==}
|
||||
resolution:
|
||||
{
|
||||
integrity: sha512-4Oz8DUIwdvoa5qMJelxipzi/iJIi40O5cGV1wNYp5hvZP8ZN0T+jiNkL0QepXs+EsQ9XJ8ipEDoiH70ySUJP3Q==
|
||||
}
|
||||
engines: {node: '>= 0.8'}
|
||||
dependencies:
|
||||
bytes: 3.1.0
|
||||
@ -286,15 +360,24 @@ packages:
|
||||
dev: false
|
||||
|
||||
/safe-buffer/5.1.2:
|
||||
resolution: {integrity: sha512-Gd2UZBJDkXlY7GbJxfsE8/nvKkUEU1G38c1siN6QP6a9PT9MmHB8GnpscSmMJSoF8LOIrt8ud/wPtojys4G6+g==}
|
||||
resolution:
|
||||
{
|
||||
integrity: sha512-Gd2UZBJDkXlY7GbJxfsE8/nvKkUEU1G38c1siN6QP6a9PT9MmHB8GnpscSmMJSoF8LOIrt8ud/wPtojys4G6+g==
|
||||
}
|
||||
dev: false
|
||||
|
||||
/safer-buffer/2.1.2:
|
||||
resolution: {integrity: sha512-YZo3K82SD7Riyi0E1EQPojLz7kpepnSQI9IyPbHHg1XXXevb5dJI7tpyN2ADxGcQbHG7vcyRHk0cbwqcQriUtg==}
|
||||
resolution:
|
||||
{
|
||||
integrity: sha512-YZo3K82SD7Riyi0E1EQPojLz7kpepnSQI9IyPbHHg1XXXevb5dJI7tpyN2ADxGcQbHG7vcyRHk0cbwqcQriUtg==
|
||||
}
|
||||
dev: false
|
||||
|
||||
/send/0.17.1:
|
||||
resolution: {integrity: sha512-BsVKsiGcQMFwT8UxypobUKyv7irCNRHk1T0G680vk88yf6LBByGcZJOTJCrTP2xVN6yI+XjPJcNuE3V4fT9sAg==}
|
||||
resolution:
|
||||
{
|
||||
integrity: sha512-BsVKsiGcQMFwT8UxypobUKyv7irCNRHk1T0G680vk88yf6LBByGcZJOTJCrTP2xVN6yI+XjPJcNuE3V4fT9sAg==
|
||||
}
|
||||
engines: {node: '>= 0.8.0'}
|
||||
dependencies:
|
||||
debug: 2.6.9
|
||||
@ -313,7 +396,10 @@ packages:
|
||||
dev: false
|
||||
|
||||
/serve-static/1.14.1:
|
||||
resolution: {integrity: sha512-JMrvUwE54emCYWlTI+hGrGv5I8dEwmco/00EvkzIIsR7MqrHonbD9pO2MOfFnpFntl7ecpZs+3mW+XbQZu9QCg==}
|
||||
resolution:
|
||||
{
|
||||
integrity: sha512-JMrvUwE54emCYWlTI+hGrGv5I8dEwmco/00EvkzIIsR7MqrHonbD9pO2MOfFnpFntl7ecpZs+3mW+XbQZu9QCg==
|
||||
}
|
||||
engines: {node: '>= 0.8.0'}
|
||||
dependencies:
|
||||
encodeurl: 1.0.2
|
||||
@ -323,7 +409,10 @@ packages:
|
||||
dev: false
|
||||
|
||||
/setprototypeof/1.1.1:
|
||||
resolution: {integrity: sha512-JvdAWfbXeIGaZ9cILp38HntZSFSo3mWg6xGcJJsd+d4aRMOqauag1C63dJfDw7OaMYwEbHMOxEZ1lqVRYP2OAw==}
|
||||
resolution:
|
||||
{
|
||||
integrity: sha512-JvdAWfbXeIGaZ9cILp38HntZSFSo3mWg6xGcJJsd+d4aRMOqauag1C63dJfDw7OaMYwEbHMOxEZ1lqVRYP2OAw==
|
||||
}
|
||||
dev: false
|
||||
|
||||
/statuses/1.5.0:
|
||||
@ -332,12 +421,18 @@ packages:
|
||||
dev: false
|
||||
|
||||
/toidentifier/1.0.0:
|
||||
resolution: {integrity: sha512-yaOH/Pk/VEhBWWTlhI+qXxDFXlejDGcQipMlyxda9nthulaxLZUNcUqFxokp0vcYnvteJln5FNQDRrxj3YcbVw==}
|
||||
resolution:
|
||||
{
|
||||
integrity: sha512-yaOH/Pk/VEhBWWTlhI+qXxDFXlejDGcQipMlyxda9nthulaxLZUNcUqFxokp0vcYnvteJln5FNQDRrxj3YcbVw==
|
||||
}
|
||||
engines: {node: '>=0.6'}
|
||||
dev: false
|
||||
|
||||
/type-is/1.6.18:
|
||||
resolution: {integrity: sha512-TkRKr9sUTxEH8MdfuCSP7VizJyzRNMjj2J2do2Jr3Kym598JVdEksuzPQCnlFPW4ky9Q+iA+ma9BGm06XQBy8g==}
|
||||
resolution:
|
||||
{
|
||||
integrity: sha512-TkRKr9sUTxEH8MdfuCSP7VizJyzRNMjj2J2do2Jr3Kym598JVdEksuzPQCnlFPW4ky9Q+iA+ma9BGm06XQBy8g==
|
||||
}
|
||||
engines: {node: '>= 0.6'}
|
||||
dependencies:
|
||||
media-typer: 0.3.0
537  __tests__/data/v8-canary-dist-index.json  Normal file
@ -0,0 +1,537 @@
|
||||
[
|
||||
{
|
||||
"version": "v20.0.0-v8-canary20221103f7e2421e91",
|
||||
"date": "2022-11-03",
|
||||
"files": [
|
||||
"headers",
|
||||
"linux-arm64",
|
||||
"linux-armv7l",
|
||||
"linux-ppc64le",
|
||||
"linux-s390x",
|
||||
"linux-x64",
|
||||
"osx-arm64-tar",
|
||||
"osx-x64-pkg",
|
||||
"osx-x64-tar",
|
||||
"src",
|
||||
"win-x64-7z",
|
||||
"win-x64-exe",
|
||||
"win-x64-msi",
|
||||
"win-x64-zip",
|
||||
"win-x86-7z",
|
||||
"win-x86-exe",
|
||||
"win-x86-msi",
|
||||
"win-x86-zip"
|
||||
],
|
||||
"npm": "8.19.2",
|
||||
"v8": "10.9.138.0",
|
||||
"uv": "1.43.0",
|
||||
"zlib": "1.2.11",
|
||||
"openssl": "3.0.5+quic",
|
||||
"modules": "112",
|
||||
"lts": false,
|
||||
"security": false
|
||||
},
|
||||
{
|
||||
"version": "v20.0.0-v8-canary202211026bf85d0fb4",
|
||||
"date": "2022-11-02",
|
||||
"files": [
|
||||
"headers",
|
||||
"linux-arm64",
|
||||
"linux-armv7l",
|
||||
"linux-ppc64le",
|
||||
"linux-s390x",
|
||||
"linux-x64",
|
||||
"osx-arm64-tar",
|
||||
"osx-x64-pkg",
|
||||
"osx-x64-tar",
|
||||
"src",
|
||||
"win-x64-7z",
|
||||
"win-x64-exe",
|
||||
"win-x64-msi",
|
||||
"win-x64-zip",
|
||||
"win-x86-7z",
|
||||
"win-x86-exe",
|
||||
"win-x86-msi",
|
||||
"win-x86-zip"
|
||||
],
|
||||
"npm": "8.19.2",
|
||||
"v8": "10.9.130.0",
|
||||
"uv": "1.43.0",
|
||||
"zlib": "1.2.11",
|
||||
"openssl": "3.0.5+quic",
|
||||
"modules": "112",
|
||||
"lts": false,
|
||||
"security": false
|
||||
},
|
||||
{
|
||||
"version": "v20.0.0-v8-canary20221101e50e45c9f8",
|
||||
"date": "2022-11-01",
|
||||
"files": [
|
||||
"headers",
|
||||
"linux-arm64",
|
||||
"linux-armv7l",
|
||||
"linux-ppc64le",
|
||||
"linux-s390x",
|
||||
"linux-x64",
|
||||
"osx-arm64-tar",
|
||||
"osx-x64-tar",
|
||||
"src",
|
||||
"win-x64-7z",
|
||||
"win-x64-exe",
|
||||
"win-x64-msi",
|
||||
"win-x64-zip",
|
||||
"win-x86-7z",
|
||||
"win-x86-exe",
|
||||
"win-x86-msi",
|
||||
"win-x86-zip"
|
||||
],
|
||||
"npm": "8.19.2",
|
||||
"v8": "10.9.129.0",
|
||||
"uv": "1.43.0",
|
||||
"zlib": "1.2.11",
|
||||
"openssl": "3.0.5+quic",
|
||||
"modules": "112",
|
||||
"lts": false,
|
||||
"security": false
|
||||
},
|
||||
{
|
||||
"version": "v20.0.0-v8-canary202210311b1e675ad0",
|
||||
"date": "2022-10-31",
|
||||
"files": [
|
||||
"headers",
|
||||
"linux-arm64",
|
||||
"linux-armv7l",
|
||||
"linux-ppc64le",
|
||||
"linux-s390x",
|
||||
"linux-x64",
|
||||
"osx-arm64-tar",
|
||||
"osx-x64-tar",
|
||||
"src",
|
||||
"win-x64-7z",
|
||||
"win-x64-exe",
|
||||
"win-x64-msi",
|
||||
"win-x64-zip",
|
||||
"win-x86-7z",
|
||||
"win-x86-exe",
|
||||
"win-x86-msi",
|
||||
"win-x86-zip"
|
||||
],
|
||||
"npm": "8.19.2",
|
||||
"v8": "10.9.125.0",
|
||||
"uv": "1.43.0",
|
||||
"zlib": "1.2.11",
|
||||
"openssl": "3.0.5+quic",
|
||||
"modules": "112",
|
||||
"lts": false,
|
||||
"security": false
|
||||
},
|
||||
{
|
||||
"version": "v20.0.0-v8-canary20221030fefe1c0879",
|
||||
"date": "2022-10-30",
|
||||
"files": [
|
||||
"headers",
|
||||
"linux-arm64",
|
||||
"linux-armv7l",
|
||||
"linux-ppc64le",
|
||||
"linux-s390x",
|
||||
"linux-x64",
|
||||
"osx-arm64-tar",
|
||||
"osx-x64-pkg",
|
||||
"osx-x64-tar",
|
||||
"src",
|
||||
"win-x64-7z",
|
||||
"win-x64-exe",
|
||||
"win-x64-msi",
|
||||
"win-x64-zip",
|
||||
"win-x86-7z",
|
||||
"win-x86-exe",
|
||||
"win-x86-msi",
|
||||
"win-x86-zip"
|
||||
],
|
||||
"npm": "8.19.2",
|
||||
"v8": "10.9.125.0",
|
||||
"uv": "1.43.0",
|
||||
"zlib": "1.2.11",
|
||||
"openssl": "3.0.5+quic",
|
||||
"modules": "112",
|
||||
"lts": false,
|
||||
"security": false
|
||||
},
|
||||
{
|
||||
"version": "v20.0.0-v8-canary202210293881e51ba2",
|
||||
"date": "2022-10-29",
|
||||
"files": [
|
||||
"headers",
|
||||
"linux-arm64",
|
||||
"linux-armv7l",
|
||||
"linux-ppc64le",
|
||||
"linux-s390x",
|
||||
"linux-x64",
|
||||
"osx-arm64-tar",
|
||||
"osx-x64-pkg",
|
||||
"osx-x64-tar",
|
||||
"src",
|
||||
"win-x64-7z",
|
||||
"win-x64-exe",
|
||||
"win-x64-msi",
|
||||
"win-x64-zip",
|
||||
"win-x86-7z",
|
||||
"win-x86-exe",
|
||||
"win-x86-msi",
|
||||
"win-x86-zip"
|
||||
],
|
||||
"npm": "8.19.2",
|
||||
"v8": "10.9.122.0",
|
||||
"uv": "1.43.0",
|
||||
"zlib": "1.2.11",
|
||||
"openssl": "3.0.5+quic",
|
||||
"modules": "112",
|
||||
"lts": false,
|
||||
"security": false
|
||||
},
|
||||
{
|
||||
"version": "v20.0.0-v8-canary202210286fe49d2a49",
|
||||
"date": "2022-10-28",
|
||||
"files": [
|
||||
"osx-arm64-tar",
|
||||
"osx-x64-pkg",
|
||||
"osx-x64-tar",
|
||||
"win-x64-7z",
|
||||
"win-x64-exe",
|
||||
"win-x64-msi",
|
||||
"win-x64-zip",
|
||||
"win-x86-7z",
|
||||
"win-x86-exe",
|
||||
"win-x86-msi",
|
||||
"win-x86-zip"
|
||||
],
|
||||
"npm": "8.19.2",
|
||||
"v8": "10.9.112.0",
|
||||
"uv": "1.43.0",
|
||||
"zlib": "1.2.11",
|
||||
"openssl": "3.0.5+quic",
|
||||
"modules": "112",
|
||||
"lts": false,
|
||||
"security": false
|
||||
},
|
||||
{
|
||||
"version": "v20.0.0-v8-canary20221027c470b3108c",
|
||||
"date": "2022-10-27",
|
||||
"files": [
|
||||
"headers",
|
||||
"linux-arm64",
|
||||
"linux-armv7l",
|
||||
"linux-ppc64le",
|
||||
"linux-s390x",
|
||||
"linux-x64",
|
||||
"osx-arm64-tar",
|
||||
"osx-x64-pkg",
|
||||
"osx-x64-tar",
|
||||
"src",
|
||||
"win-x64-7z",
|
||||
"win-x64-exe",
|
||||
"win-x64-msi",
|
||||
"win-x64-zip",
|
||||
"win-x86-7z",
|
||||
"win-x86-exe",
|
||||
"win-x86-msi",
|
||||
"win-x86-zip"
|
||||
],
|
||||
"npm": "8.19.2",
|
||||
"v8": "10.9.101.0",
|
||||
"uv": "1.43.0",
|
||||
"zlib": "1.2.11",
|
||||
"openssl": "3.0.5+quic",
|
||||
"modules": "112",
|
||||
"lts": false,
|
||||
"security": false
|
||||
},
|
||||
{
|
||||
"version": "v20.0.0-v8-canary20221026c24f7d1e4a",
|
||||
"date": "2022-10-26",
|
||||
"files": [
|
||||
"headers",
|
||||
"linux-arm64",
|
||||
"linux-armv7l",
|
||||
"linux-ppc64le",
|
||||
"linux-s390x",
|
||||
"linux-x64",
|
||||
"osx-arm64-tar",
|
||||
"osx-x64-pkg",
|
||||
"osx-x64-tar",
|
||||
"src",
|
||||
"win-x64-7z",
|
||||
"win-x64-exe",
|
||||
"win-x64-msi",
|
||||
"win-x64-zip",
|
||||
"win-x86-7z",
|
||||
"win-x86-exe",
|
||||
"win-x86-msi",
|
||||
"win-x86-zip"
|
||||
],
|
||||
"npm": "8.19.2",
|
||||
"v8": "10.9.88.0",
|
||||
"uv": "1.43.0",
|
||||
"zlib": "1.2.11",
|
||||
"openssl": "3.0.5+quic",
|
||||
"modules": "112",
|
||||
"lts": false,
|
||||
"security": false
|
||||
},
|
||||
{
|
||||
"version": "v20.0.0-v8-canary20221025b063237e20",
|
||||
"date": "2022-10-25",
|
||||
"files": [
|
||||
"headers",
|
||||
"linux-arm64",
|
||||
"linux-armv7l",
|
||||
"linux-ppc64le",
|
||||
"linux-x64",
|
||||
"osx-arm64-tar",
|
||||
"osx-x64-pkg",
|
||||
"osx-x64-tar",
|
||||
"src",
|
||||
"win-x64-7z",
|
||||
"win-x64-exe",
|
||||
"win-x64-msi",
|
||||
"win-x64-zip",
|
||||
"win-x86-7z",
|
||||
"win-x86-exe",
|
||||
"win-x86-msi",
|
||||
"win-x86-zip"
|
||||
],
|
||||
"npm": "8.19.2",
|
||||
"v8": "10.9.73.0",
|
||||
"uv": "1.43.0",
|
||||
"zlib": "1.2.11",
|
||||
"openssl": "3.0.5+quic",
|
||||
"modules": "112",
|
||||
"lts": false,
|
||||
"security": false
|
||||
},
|
||||
{
|
||||
"version": "v20.0.0-v8-canary2022102454996f930f",
|
||||
"date": "2022-10-24",
|
||||
"files": [
|
||||
"headers",
|
||||
"linux-arm64",
|
||||
"linux-armv7l",
|
||||
"linux-ppc64le",
|
||||
"linux-x64",
|
||||
"osx-arm64-tar",
|
||||
"osx-x64-pkg",
|
||||
"osx-x64-tar",
|
||||
"src",
|
||||
"win-x64-7z",
|
||||
"win-x64-exe",
|
||||
"win-x64-msi",
|
||||
"win-x64-zip",
|
||||
"win-x86-7z",
|
||||
"win-x86-exe",
|
||||
"win-x86-msi",
|
||||
"win-x86-zip"
|
||||
],
|
||||
"npm": "8.19.2",
|
||||
"v8": "10.9.61.0",
|
||||
"uv": "1.43.0",
|
||||
"zlib": "1.2.11",
|
||||
"openssl": "3.0.5+quic",
|
||||
"modules": "112",
|
||||
"lts": false,
|
||||
"security": false
|
||||
},
|
||||
{
|
||||
"version": "v20.0.0-v8-canary2022102310ff1e5a8d",
|
||||
"date": "2022-10-23",
|
||||
"files": [
|
||||
"headers",
|
||||
"linux-arm64",
|
||||
"linux-armv7l",
|
||||
"linux-ppc64le",
|
||||
"linux-x64",
|
||||
"osx-arm64-tar",
|
||||
"osx-x64-tar",
|
||||
"src",
|
||||
"win-x64-7z",
|
||||
"win-x64-exe",
|
||||
"win-x64-msi",
|
||||
"win-x64-zip",
|
||||
"win-x86-7z",
|
||||
"win-x86-exe",
|
||||
"win-x86-msi",
|
||||
"win-x86-zip"
|
||||
],
|
||||
"npm": "8.19.2",
|
||||
"v8": "10.9.61.0",
|
||||
"uv": "1.43.0",
|
||||
"zlib": "1.2.11",
|
||||
"openssl": "3.0.5+quic",
|
||||
"modules": "112",
|
||||
"lts": false,
|
||||
"security": false
|
||||
},
|
||||
{
|
||||
"version": "v20.0.0-v8-canary20221022e83bcb6c41",
|
||||
"date": "2022-10-22",
|
||||
"files": [
|
||||
"headers",
|
||||
"linux-arm64",
|
||||
"linux-armv7l",
|
||||
"linux-x64",
|
||||
"osx-arm64-tar",
|
||||
"osx-x64-tar",
|
||||
"src",
|
||||
"win-x64-7z",
|
||||
"win-x64-exe",
|
||||
"win-x64-msi",
|
||||
"win-x64-zip",
|
||||
"win-x86-7z",
|
||||
"win-x86-exe",
|
||||
"win-x86-msi",
|
||||
"win-x86-zip"
|
||||
],
|
||||
"npm": "8.19.2",
|
||||
"v8": "10.9.60.0",
|
||||
"uv": "1.43.0",
|
||||
"zlib": "1.2.11",
|
||||
"openssl": "3.0.5+quic",
|
||||
"modules": "112",
|
||||
"lts": false,
|
||||
"security": false
|
||||
},
|
||||
{
|
||||
"version": "v20.0.0-v8-canary20221021f6d5f347fa",
|
||||
"date": "2022-10-21",
|
||||
"files": [
|
||||
"headers",
|
||||
"linux-arm64",
|
||||
"linux-armv7l",
|
||||
"linux-x64",
|
||||
"osx-arm64-tar",
|
||||
"osx-x64-tar",
|
||||
"src",
|
||||
"win-x64-7z",
|
||||
"win-x64-exe",
|
||||
"win-x64-msi",
|
||||
"win-x64-zip"
|
||||
],
|
||||
"npm": "8.19.2",
|
||||
"v8": "10.9.48.0",
|
||||
"uv": "1.43.0",
|
||||
"zlib": "1.2.11",
|
||||
"openssl": "3.0.5+quic",
|
||||
"modules": "112",
|
||||
"lts": false,
|
||||
"security": false
|
||||
},
|
||||
{
|
||||
"version": "v20.0.0-v8-canary20221020f78c149307",
|
||||
"date": "2022-10-20",
|
||||
"files": [
|
||||
"headers",
|
||||
"linux-arm64",
|
||||
"linux-armv7l",
|
||||
"linux-x64",
|
||||
"osx-arm64-tar",
|
||||
"osx-x64-pkg",
|
||||
"osx-x64-tar",
|
||||
"src",
|
||||
"win-x64-7z",
|
||||
"win-x64-exe",
|
||||
"win-x64-msi",
|
||||
"win-x64-zip"
|
||||
],
|
||||
"npm": "8.19.2",
|
||||
"v8": "10.9.38.0",
|
||||
"uv": "1.43.0",
|
||||
"zlib": "1.2.11",
|
||||
"openssl": "3.0.5+quic",
|
||||
"modules": "112",
|
||||
"lts": false,
|
||||
"security": false
|
||||
},
|
||||
{
|
||||
"version": "v20.0.0-v8-canary20221019d52c76f76e",
|
||||
"date": "2022-10-19",
|
||||
"files": [
|
||||
"aix-ppc64",
|
||||
"headers",
|
||||
"linux-arm64",
|
||||
"linux-armv7l",
|
||||
"linux-ppc64le",
|
||||
"linux-x64",
|
||||
"osx-arm64-tar",
|
||||
"osx-x64-pkg",
|
||||
"osx-x64-tar",
|
||||
"src",
|
||||
"win-x64-7z",
|
||||
"win-x64-exe",
|
||||
"win-x64-msi",
|
||||
"win-x64-zip"
|
||||
],
|
||||
"npm": "8.19.2",
|
||||
"v8": "10.9.27.0",
|
||||
"uv": "1.43.0",
|
||||
"zlib": "1.2.11",
|
||||
"openssl": "3.0.5+quic",
|
||||
"modules": "112",
|
||||
"lts": false,
|
||||
"security": false
|
||||
},
|
||||
{
|
||||
"version": "v19.0.0-v8-canary202210187d6960f23f",
|
||||
"date": "2022-10-18",
|
||||
"files": [
|
||||
"aix-ppc64",
|
||||
"headers",
|
||||
"linux-arm64",
|
||||
"linux-armv7l",
|
||||
"linux-ppc64le",
|
||||
"linux-s390x",
|
||||
"linux-x64",
|
||||
"osx-arm64-tar",
|
||||
"osx-x64-tar",
|
||||
"src",
|
||||
"win-x64-7z",
|
||||
"win-x64-exe",
|
||||
"win-x64-msi",
|
||||
"win-x64-zip"
|
||||
],
|
||||
"npm": "8.19.2",
|
||||
"v8": "10.9.12.0",
|
||||
"uv": "1.43.0",
|
||||
"zlib": "1.2.11",
|
||||
"openssl": "3.0.5+quic",
|
||||
"modules": "112",
|
||||
"lts": false,
|
||||
"security": false
|
||||
},
|
||||
{
|
||||
"version": "v19.0.0-v8-canary202210172ec229fc56",
|
||||
"date": "2022-10-17",
|
||||
"files": [
|
||||
"aix-ppc64",
|
||||
"headers",
|
||||
"linux-arm64",
|
||||
"linux-armv7l",
|
||||
"linux-ppc64le",
|
||||
"linux-s390x",
|
||||
"linux-x64",
|
||||
"osx-arm64-tar",
|
||||
"osx-x64-tar",
|
||||
"src",
|
||||
"win-x64-7z",
|
||||
"win-x64-exe",
|
||||
"win-x64-msi",
|
||||
"win-x64-zip"
|
||||
],
|
||||
"npm": "8.19.2",
|
||||
"v8": "10.9.6.0",
|
||||
"uv": "1.43.0",
|
||||
"zlib": "1.2.11",
|
||||
"openssl": "3.0.5+quic",
|
||||
"modules": "112",
|
||||
"lts": false,
|
||||
"security": false
|
||||
}
|
||||
]
303  __tests__/main.test.ts  Normal file
@ -0,0 +1,303 @@
|
||||
import * as core from '@actions/core';
|
||||
import * as exec from '@actions/exec';
|
||||
import * as tc from '@actions/tool-cache';
|
||||
import * as cache from '@actions/cache';
|
||||
|
||||
import fs from 'fs';
|
||||
import path from 'path';
|
||||
import osm from 'os';
|
||||
|
||||
import each from 'jest-each';
|
||||
|
||||
import * as main from '../src/main';
|
||||
import * as util from '../src/util';
|
||||
import OfficialBuilds from '../src/distributions/official_builds/official_builds';
|
||||
|
||||
describe('main tests', () => {
|
||||
let inputs = {} as any;
|
||||
let os = {} as any;
|
||||
|
||||
let infoSpy: jest.SpyInstance;
|
||||
let warningSpy: jest.SpyInstance;
|
||||
let inSpy: jest.SpyInstance;
|
||||
let setOutputSpy: jest.SpyInstance;
|
||||
let startGroupSpy: jest.SpyInstance;
|
||||
let endGroupSpy: jest.SpyInstance;
|
||||
|
||||
let existsSpy: jest.SpyInstance;
|
||||
|
||||
let getExecOutputSpy: jest.SpyInstance;
|
||||
|
||||
let parseNodeVersionSpy: jest.SpyInstance;
|
||||
let cnSpy: jest.SpyInstance;
|
||||
let findSpy: jest.SpyInstance;
|
||||
let isCacheActionAvailable: jest.SpyInstance;
|
||||
|
||||
let setupNodeJsSpy: jest.SpyInstance;
|
||||
|
||||
beforeEach(() => {
|
||||
inputs = {};
|
||||
|
||||
// node
|
||||
os = {};
|
||||
console.log('::stop-commands::stoptoken');
|
||||
process.env['GITHUB_PATH'] = ''; // Stub out ENV file functionality so we can verify it writes to standard out
|
||||
process.env['GITHUB_OUTPUT'] = ''; // Stub out ENV file functionality so we can verify it writes to standard out
|
||||
infoSpy = jest.spyOn(core, 'info');
|
||||
infoSpy.mockImplementation(() => {});
|
||||
setOutputSpy = jest.spyOn(core, 'setOutput');
|
||||
setOutputSpy.mockImplementation(() => {});
|
||||
warningSpy = jest.spyOn(core, 'warning');
|
||||
warningSpy.mockImplementation(() => {});
|
||||
startGroupSpy = jest.spyOn(core, 'startGroup');
|
||||
startGroupSpy.mockImplementation(() => {});
|
||||
endGroupSpy = jest.spyOn(core, 'endGroup');
|
||||
endGroupSpy.mockImplementation(() => {});
|
||||
inSpy = jest.spyOn(core, 'getInput');
|
||||
inSpy.mockImplementation(name => inputs[name]);
|
||||
|
||||
getExecOutputSpy = jest.spyOn(exec, 'getExecOutput');
|
||||
|
||||
findSpy = jest.spyOn(tc, 'find');
|
||||
|
||||
isCacheActionAvailable = jest.spyOn(cache, 'isFeatureAvailable');
|
||||
|
||||
existsSpy = jest.spyOn(fs, 'existsSync');
|
||||
|
||||
cnSpy = jest.spyOn(process.stdout, 'write');
|
||||
cnSpy.mockImplementation(line => {
|
||||
// uncomment to debug
|
||||
// process.stderr.write('write:' + line + '\n');
|
||||
});
|
||||
|
||||
setupNodeJsSpy = jest.spyOn(OfficialBuilds.prototype, 'setupNodeJs');
|
||||
setupNodeJsSpy.mockImplementation(() => {});
|
||||
});
|
||||
|
||||
afterEach(() => {
|
||||
jest.resetAllMocks();
|
||||
jest.clearAllMocks();
|
||||
//jest.restoreAllMocks();
|
||||
});
|
||||
|
||||
afterAll(async () => {
|
||||
console.log('::stoptoken::');
|
||||
jest.restoreAllMocks();
|
||||
}, 100000);
|
||||
|
||||
describe('parseNodeVersionFile', () => {
|
||||
each`
|
||||
contents | expected
|
||||
${'12'} | ${'12'}
|
||||
${'12.3'} | ${'12.3'}
|
||||
${'12.3.4'} | ${'12.3.4'}
|
||||
${'v12.3.4'} | ${'12.3.4'}
|
||||
${'lts/erbium'} | ${'lts/erbium'}
|
||||
${'lts/*'} | ${'lts/*'}
|
||||
${'nodejs 12.3.4'} | ${'12.3.4'}
|
||||
${'ruby 2.3.4\nnodejs 12.3.4\npython 3.4.5'} | ${'12.3.4'}
|
||||
${''} | ${''}
|
||||
${'unknown format'} | ${'unknown format'}
|
||||
${' 14.1.0 '} | ${'14.1.0'}
|
||||
${'{"volta": {"node": ">=14.0.0 <=17.0.0"}}'}| ${'>=14.0.0 <=17.0.0'}
|
||||
${'{"engines": {"node": "17.0.0"}}'} | ${'17.0.0'}
|
||||
`.it('parses "$contents"', ({contents, expected}) => {
|
||||
expect(util.parseNodeVersionFile(contents)).toBe(expected);
|
||||
});
|
||||
});
|
||||
|
||||
describe('printEnvDetailsAndSetOutput', () => {
|
||||
it.each([
|
||||
[{node: '12.0.2', npm: '6.3.3', yarn: '1.22.11'}],
|
||||
[{node: '16.0.2', npm: '7.3.3', yarn: '2.22.11'}],
|
||||
[{node: '14.0.1', npm: '8.1.0', yarn: '3.2.1'}],
|
||||
[{node: '17.0.2', npm: '6.3.3', yarn: ''}]
|
||||
])('Tools versions %p', async obj => {
|
||||
getExecOutputSpy.mockImplementation(async command => {
|
||||
if (Reflect.has(obj, command) && !obj[command]) {
|
||||
return {
|
||||
stdout: '',
|
||||
stderr: `${command} does not exist`,
|
||||
exitCode: 1
|
||||
};
|
||||
}
|
||||
|
||||
return {stdout: obj[command], stderr: '', exitCode: 0};
|
||||
});
|
||||
|
||||
await util.printEnvDetailsAndSetOutput();
|
||||
|
||||
expect(setOutputSpy).toHaveBeenCalledWith('node-version', obj['node']);
|
||||
Object.getOwnPropertyNames(obj).forEach(name => {
|
||||
if (!obj[name]) {
|
||||
expect(infoSpy).toHaveBeenCalledWith(
|
||||
`[warning]${name} does not exist`
|
||||
);
|
||||
}
|
||||
expect(infoSpy).toHaveBeenCalledWith(`${name}: ${obj[name]}`);
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
describe('node-version-file flag', () => {
|
||||
beforeEach(() => {
|
||||
parseNodeVersionSpy = jest.spyOn(util, 'parseNodeVersionFile');
|
||||
});
|
||||
|
||||
it('not used if node-version is provided', async () => {
|
||||
// Arrange
|
||||
inputs['node-version'] = '12';
|
||||
|
||||
// Act
|
||||
await main.run();
|
||||
|
||||
// Assert
|
||||
expect(parseNodeVersionSpy).toHaveBeenCalledTimes(0);
|
||||
}, 10000);
|
||||
|
||||
it('not used if node-version-file not provided', async () => {
|
||||
// Act
|
||||
await main.run();
|
||||
|
||||
// Assert
|
||||
expect(parseNodeVersionSpy).toHaveBeenCalledTimes(0);
|
||||
});
|
||||
|
||||
it('reads node-version-file if provided', async () => {
|
||||
// Arrange
|
||||
const versionSpec = 'v14';
|
||||
const versionFile = '.nvmrc';
|
||||
const expectedVersionSpec = '14';
|
||||
process.env['GITHUB_WORKSPACE'] = path.join(__dirname, 'data');
|
||||
inputs['node-version-file'] = versionFile;
|
||||
|
||||
parseNodeVersionSpy.mockImplementation(() => expectedVersionSpec);
|
||||
existsSpy.mockImplementationOnce(
|
||||
input => input === path.join(__dirname, 'data', versionFile)
|
||||
);
|
||||
|
||||
// Act
|
||||
await main.run();
|
||||
|
||||
// Assert
|
||||
expect(existsSpy).toHaveBeenCalledTimes(1);
|
||||
expect(existsSpy).toHaveReturnedWith(true);
|
||||
expect(parseNodeVersionSpy).toHaveBeenCalledWith(versionSpec);
|
||||
expect(infoSpy).toHaveBeenCalledWith(
|
||||
`Resolved ${versionFile} as ${expectedVersionSpec}`
|
||||
);
|
||||
}, 10000);
|
||||
|
||||
it('reads package.json as node-version-file if provided', async () => {
|
||||
// Arrange
|
||||
const versionSpec = fs.readFileSync(
|
||||
path.join(__dirname, 'data/package.json'),
|
||||
'utf-8'
|
||||
);
|
||||
const versionFile = 'package.json';
|
||||
const expectedVersionSpec = '14';
|
||||
process.env['GITHUB_WORKSPACE'] = path.join(__dirname, 'data');
|
||||
inputs['node-version-file'] = versionFile;
|
||||
|
||||
parseNodeVersionSpy.mockImplementation(() => expectedVersionSpec);
|
||||
existsSpy.mockImplementationOnce(
|
||||
input => input === path.join(__dirname, 'data', versionFile)
|
||||
);
|
||||
// Act
|
||||
await main.run();
|
||||
|
||||
// Assert
|
||||
expect(existsSpy).toHaveBeenCalledTimes(1);
|
||||
expect(existsSpy).toHaveReturnedWith(true);
|
||||
expect(parseNodeVersionSpy).toHaveBeenCalledWith(versionSpec);
|
||||
expect(infoSpy).toHaveBeenCalledWith(
|
||||
`Resolved ${versionFile} as ${expectedVersionSpec}`
|
||||
);
|
||||
}, 10000);
|
||||
|
||||
it('both node-version-file and node-version are provided', async () => {
|
||||
inputs['node-version'] = '12';
|
||||
const versionSpec = 'v14';
|
||||
const versionFile = '.nvmrc';
|
||||
const expectedVersionSpec = '14';
|
||||
process.env['GITHUB_WORKSPACE'] = path.join(__dirname, '..');
|
||||
inputs['node-version-file'] = versionFile;
|
||||
|
||||
parseNodeVersionSpy.mockImplementation(() => expectedVersionSpec);
|
||||
|
||||
// Act
|
||||
await main.run();
|
||||
|
||||
// Assert
|
||||
expect(existsSpy).toHaveBeenCalledTimes(0);
|
||||
expect(parseNodeVersionSpy).not.toHaveBeenCalled();
|
||||
expect(warningSpy).toHaveBeenCalledWith(
|
||||
'Both node-version and node-version-file inputs are specified, only node-version will be used'
|
||||
);
|
||||
});
|
||||
|
||||
it('should throw an error if node-version-file is not found', async () => {
|
||||
const versionFile = '.nvmrc';
|
||||
const versionFilePath = path.join(__dirname, '..', versionFile);
|
||||
inputs['node-version-file'] = versionFile;
|
||||
|
||||
inSpy.mockImplementation(name => inputs[name]);
|
||||
existsSpy.mockImplementationOnce(
|
||||
input => input === path.join(__dirname, 'data', versionFile)
|
||||
);
|
||||
|
||||
// Act
|
||||
await main.run();
|
||||
|
||||
// Assert
|
||||
expect(existsSpy).toHaveBeenCalled();
|
||||
expect(existsSpy).toHaveReturnedWith(false);
|
||||
expect(parseNodeVersionSpy).not.toHaveBeenCalled();
|
||||
expect(cnSpy).toHaveBeenCalledWith(
|
||||
`::error::The specified node version file at: ${versionFilePath} does not exist${osm.EOL}`
|
||||
);
|
||||
});
|
||||
});
|
||||
|
||||
describe('cache on GHES', () => {
|
||||
it('Should throw an error, because cache is not supported', async () => {
|
||||
inputs['node-version'] = '12';
|
||||
inputs['cache'] = 'npm';
|
||||
|
||||
inSpy.mockImplementation(name => inputs[name]);
|
||||
|
||||
const toolPath = path.normalize('/cache/node/12.16.1/x64');
|
||||
findSpy.mockImplementation(() => toolPath);
|
||||
|
||||
// expect(logSpy).toHaveBeenCalledWith(`Found in cache @ ${toolPath}`);
|
||||
process.env['GITHUB_SERVER_URL'] = 'https://www.test.com';
|
||||
isCacheActionAvailable.mockImplementation(() => false);
|
||||
|
||||
await main.run();
|
||||
|
||||
expect(warningSpy).toHaveBeenCalledWith(
|
||||
`Cache action is only supported on GHES version >= 3.5. If you are on version >=3.5 Please check with GHES admin if Actions cache service is enabled or not.`
|
||||
);
|
||||
});
|
||||
|
||||
it('Should throw an internal error', async () => {
|
||||
inputs['node-version'] = '12';
|
||||
inputs['cache'] = 'npm';
|
||||
|
||||
inSpy.mockImplementation(name => inputs[name]);
|
||||
|
||||
const toolPath = path.normalize('/cache/node/12.16.1/x64');
|
||||
findSpy.mockImplementation(() => toolPath);
|
||||
|
||||
// expect(logSpy).toHaveBeenCalledWith(`Found in cache @ ${toolPath}`);
|
||||
process.env['GITHUB_SERVER_URL'] = '';
|
||||
isCacheActionAvailable.mockImplementation(() => false);
|
||||
|
||||
await main.run();
|
||||
|
||||
expect(warningSpy).toHaveBeenCalledWith(
|
||||
'The runner was not able to contact the cache service. Caching will be skipped'
|
||||
);
|
||||
});
|
||||
});
|
||||
});
18  __tests__/mock/glob-mock.test.ts  Normal file
@ -0,0 +1,18 @@
import {MockGlobber} from './glob-mock';

describe('mocked globber tests', () => {
  it('globber should return generator', async () => {
    const globber = new MockGlobber(['aaa', 'bbb', 'ccc']);
    const generator = globber.globGenerator();
    const result: string[] = [];
    for await (const itemPath of generator) {
      result.push(itemPath);
    }
    expect(result).toEqual(['aaa', 'bbb', 'ccc']);
  });
  it('globber should return glob', async () => {
    const globber = new MockGlobber(['aaa', 'bbb', 'ccc']);
    const result: string[] = await globber.glob();
    expect(result).toEqual(['aaa', 'bbb', 'ccc']);
  });
});
29  __tests__/mock/glob-mock.ts  Normal file
@ -0,0 +1,29 @@
import {Globber} from '@actions/glob';

export class MockGlobber implements Globber {
  private readonly expected: string[];
  constructor(expected: string[]) {
    this.expected = expected;
  }
  getSearchPaths(): string[] {
    return this.expected.slice();
  }

  async glob(): Promise<string[]> {
    const result: string[] = [];
    for await (const itemPath of this.globGenerator()) {
      result.push(itemPath);
    }
    return result;
  }

  async *globGenerator(): AsyncGenerator<string, void> {
    for (const e of this.expected) {
      yield e;
    }
  }

  static async create(expected: string[]): Promise<MockGlobber> {
    return new MockGlobber(expected);
  }
}
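For context, MockGlobber mirrors the Globber interface from @actions/glob, so tests can hand it to code that would otherwise call glob.create and scan the real file system. A minimal sketch of that wiring, assuming a Jest spy on glob.create (the test name and file path below are invented for illustration):

import * as glob from '@actions/glob';
import {MockGlobber} from './glob-mock';

it('substitutes the mocked globber for @actions/glob', async () => {
  // Stub glob.create so the code under test receives the canned path list.
  const createSpy = jest.spyOn(glob, 'create');
  createSpy.mockImplementation(() =>
    MockGlobber.create(['/tmp/package-lock.json'])
  );

  const globber = await glob.create('**/package-lock.json');
  expect(await globber.glob()).toEqual(['/tmp/package-lock.json']);
});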
608  __tests__/nightly-installer.test.ts  Normal file
@ -0,0 +1,608 @@
|
||||
import * as core from '@actions/core';
|
||||
import * as io from '@actions/io';
|
||||
import * as tc from '@actions/tool-cache';
|
||||
import * as httpm from '@actions/http-client';
|
||||
import * as exec from '@actions/exec';
|
||||
import * as cache from '@actions/cache';
|
||||
import fs from 'fs';
|
||||
import cp from 'child_process';
|
||||
import osm from 'os';
|
||||
import path from 'path';
|
||||
import * as main from '../src/main';
|
||||
import * as auth from '../src/authutil';
|
||||
import {INodeVersion} from '../src/distributions/base-models';
|
||||
|
||||
import nodeTestManifest from './data/versions-manifest.json';
|
||||
import nodeTestDist from './data/node-dist-index.json';
|
||||
import nodeTestDistNightly from './data/node-nightly-index.json';
|
||||
import nodeTestDistRc from './data/node-rc-index.json';
|
||||
import nodeV8CanaryTestDist from './data/v8-canary-dist-index.json';
|
||||
|
||||
describe('setup-node', () => {
|
||||
let inputs = {} as any;
|
||||
let os = {} as any;
|
||||
|
||||
let inSpy: jest.SpyInstance;
|
||||
let findSpy: jest.SpyInstance;
|
||||
let findAllVersionsSpy: jest.SpyInstance;
|
||||
let cnSpy: jest.SpyInstance;
|
||||
let logSpy: jest.SpyInstance;
|
||||
let warningSpy: jest.SpyInstance;
|
||||
let getManifestSpy: jest.SpyInstance;
|
||||
let getDistSpy: jest.SpyInstance;
|
||||
let platSpy: jest.SpyInstance;
|
||||
let archSpy: jest.SpyInstance;
|
||||
let dlSpy: jest.SpyInstance;
|
||||
let exSpy: jest.SpyInstance;
|
||||
let cacheSpy: jest.SpyInstance;
|
||||
let dbgSpy: jest.SpyInstance;
|
||||
let whichSpy: jest.SpyInstance;
|
||||
let existsSpy: jest.SpyInstance;
|
||||
let mkdirpSpy: jest.SpyInstance;
|
||||
let cpSpy: jest.SpyInstance;
|
||||
let execSpy: jest.SpyInstance;
|
||||
let authSpy: jest.SpyInstance;
|
||||
let parseNodeVersionSpy: jest.SpyInstance;
|
||||
let isCacheActionAvailable: jest.SpyInstance;
|
||||
let getExecOutputSpy: jest.SpyInstance;
|
||||
let getJsonSpy: jest.SpyInstance;
|
||||
|
||||
beforeEach(() => {
|
||||
// @actions/core
|
||||
console.log('::stop-commands::stoptoken'); // Disable executing of runner commands when running tests in actions
|
||||
process.env['GITHUB_PATH'] = ''; // Stub out ENV file functionality so we can verify it writes to standard out
|
||||
process.env['GITHUB_OUTPUT'] = ''; // Stub out ENV file functionality so we can verify it writes to standard out
|
||||
process.env['RUNNER_TEMP'] = '/runner_temp';
|
||||
inputs = {};
|
||||
inSpy = jest.spyOn(core, 'getInput');
|
||||
inSpy.mockImplementation(name => inputs[name]);
|
||||
|
||||
// node
|
||||
os = {};
|
||||
platSpy = jest.spyOn(osm, 'platform');
|
||||
platSpy.mockImplementation(() => os['platform']);
|
||||
archSpy = jest.spyOn(osm, 'arch');
|
||||
archSpy.mockImplementation(() => os['arch']);
|
||||
execSpy = jest.spyOn(cp, 'execSync');
|
||||
|
||||
// @actions/tool-cache
|
||||
findSpy = jest.spyOn(tc, 'find');
|
||||
findAllVersionsSpy = jest.spyOn(tc, 'findAllVersions');
|
||||
dlSpy = jest.spyOn(tc, 'downloadTool');
|
||||
exSpy = jest.spyOn(tc, 'extractTar');
|
||||
cacheSpy = jest.spyOn(tc, 'cacheDir');
|
||||
getManifestSpy = jest.spyOn(tc, 'getManifestFromRepo');
|
||||
|
||||
// http-client
|
||||
getJsonSpy = jest.spyOn(httpm.HttpClient.prototype, 'getJson');
|
||||
|
||||
// io
|
||||
whichSpy = jest.spyOn(io, 'which');
|
||||
existsSpy = jest.spyOn(fs, 'existsSync');
|
||||
mkdirpSpy = jest.spyOn(io, 'mkdirP');
|
||||
cpSpy = jest.spyOn(io, 'cp');
|
||||
|
||||
// @actions/tool-cache
|
||||
isCacheActionAvailable = jest.spyOn(cache, 'isFeatureAvailable');
|
||||
|
||||
// disable authentication portion for installer tests
|
||||
authSpy = jest.spyOn(auth, 'configAuthentication');
|
||||
authSpy.mockImplementation(() => {});
|
||||
|
||||
getJsonSpy.mockImplementation(url => {
|
||||
let res: any;
|
||||
if (url.includes('/rc')) {
|
||||
res = <INodeVersion[]>nodeTestDistRc;
|
||||
} else if (url.includes('/nightly')) {
|
||||
res = <INodeVersion[]>nodeTestDistNightly;
|
||||
} else if (url.includes('/v8-canary')) {
|
||||
res = <INodeVersion[]>nodeV8CanaryTestDist;
|
||||
} else {
|
||||
res = <INodeVersion[]>nodeTestDist;
|
||||
}
|
||||
|
||||
return {result: res};
|
||||
});
|
||||
|
||||
// writes
|
||||
cnSpy = jest.spyOn(process.stdout, 'write');
|
||||
logSpy = jest.spyOn(core, 'info');
|
||||
dbgSpy = jest.spyOn(core, 'debug');
|
||||
warningSpy = jest.spyOn(core, 'warning');
|
||||
cnSpy.mockImplementation(line => {
|
||||
// uncomment to debug
|
||||
// process.stderr.write('write:' + line + '\n');
|
||||
});
|
||||
logSpy.mockImplementation(line => {
|
||||
// uncomment to debug
|
||||
// process.stderr.write('log:' + line + '\n');
|
||||
});
|
||||
dbgSpy.mockImplementation(msg => {
|
||||
// uncomment to see debug output
|
||||
// process.stderr.write(msg + '\n');
|
||||
});
|
||||
warningSpy.mockImplementation(msg => {
|
||||
// uncomment to debug
|
||||
// process.stderr.write('log:' + msg + '\n');
|
||||
});
|
||||
|
||||
// @actions/exec
|
||||
getExecOutputSpy = jest.spyOn(exec, 'getExecOutput');
|
||||
getExecOutputSpy.mockImplementation(() => 'v16.15.0');
|
||||
});
|
||||
|
||||
afterEach(() => {
|
||||
jest.resetAllMocks();
|
||||
jest.clearAllMocks();
|
||||
//jest.restoreAllMocks();
|
||||
});
|
||||
|
||||
afterAll(async () => {
|
||||
console.log('::stoptoken::'); // Re-enable executing of runner commands when running tests in actions
|
||||
jest.restoreAllMocks();
|
||||
}, 100000);
|
||||
|
||||
//--------------------------------------------------
|
||||
// Found in cache tests
|
||||
//--------------------------------------------------
|
||||
|
||||
it('finds version in cache with stable true', async () => {
|
||||
inputs['node-version'] = '16-nightly';
|
||||
os['arch'] = 'x64';
|
||||
inputs.stable = 'true';
|
||||
|
||||
const toolPath = path.normalize(
|
||||
'/cache/node/16.0.0-nightly20210417bc31dc0e0f/x64'
|
||||
);
|
||||
findSpy.mockImplementation(() => toolPath);
|
||||
findAllVersionsSpy.mockImplementation(() => [
|
||||
'12.0.1',
|
||||
'16.0.0-nightly20210415c3a5e15ebe',
|
||||
'16.0.0-nightly20210417bc31dc0e0f',
|
||||
'16.1.3'
|
||||
]);
|
||||
|
||||
await main.run();
|
||||
|
||||
expect(findSpy).toHaveBeenCalledWith(
|
||||
'node',
|
||||
'16.0.0-nightly20210417bc31dc0e0f',
|
||||
'x64'
|
||||
);
|
||||
expect(logSpy).toHaveBeenCalledWith(`Found in cache @ ${toolPath}`);
|
||||
});
|
||||
|
||||
it('finds version in cache with stable false', async () => {
|
||||
inputs['node-version'] = '16.0.0-nightly20210415c3a5e15ebe';
|
||||
os['arch'] = 'x64';
|
||||
inputs.stable = 'false';
|
||||
|
||||
const toolPath = path.normalize(
|
||||
'/cache/node/16.0.0-nightly20210415c3a5e15ebe/x64'
|
||||
);
|
||||
findSpy.mockImplementation(() => toolPath);
|
||||
findAllVersionsSpy.mockImplementation(() => [
|
||||
'12.0.1',
|
||||
'16.0.0-nightly20210415c3a5e15ebe',
|
||||
'16.0.0-nightly20210417bc31dc0e0f',
|
||||
'16.1.3'
|
||||
]);
|
||||
|
||||
await main.run();
|
||||
|
||||
expect(findSpy).toHaveBeenCalledWith(
|
||||
'node',
|
||||
'16.0.0-nightly20210415c3a5e15ebe',
|
||||
'x64'
|
||||
);
|
||||
expect(logSpy).toHaveBeenCalledWith(`Found in cache @ ${toolPath}`);
|
||||
});
|
||||
|
||||
it('finds version in cache and adds it to the path', async () => {
|
||||
inputs['node-version'] = '16-nightly';
|
||||
os['arch'] = 'x64';
|
||||
|
||||
inSpy.mockImplementation(name => inputs[name]);
|
||||
|
||||
const toolPath = path.normalize(
|
||||
'/cache/node/16.0.0-nightly20210417bc31dc0e0f/x64'
|
||||
);
|
||||
findSpy.mockImplementation(() => toolPath);
|
||||
findAllVersionsSpy.mockImplementation(() => [
|
||||
'12.0.1',
|
||||
'16.0.0-nightly20210415c3a5e15ebe',
|
||||
'16.0.0-nightly20210417bc31dc0e0f',
|
||||
'16.1.3'
|
||||
]);
|
||||
|
||||
await main.run();
|
||||
|
||||
expect(findSpy).toHaveBeenCalledWith(
|
||||
'node',
|
||||
'16.0.0-nightly20210417bc31dc0e0f',
|
||||
'x64'
|
||||
);
|
||||
|
||||
const expPath = path.join(toolPath, 'bin');
|
||||
expect(cnSpy).toHaveBeenCalledWith(`::add-path::${expPath}${osm.EOL}`);
|
||||
});
|
||||
|
||||
it('handles unhandled find error and reports error', async () => {
|
||||
const errMsg = 'unhandled error message';
|
||||
inputs['node-version'] = '16.0.0-nightly20210417bc31dc0e0f';
|
||||
|
||||
findAllVersionsSpy.mockImplementation(() => [
|
||||
'12.0.1',
|
||||
'16.0.0-nightly20210415c3a5e15ebe',
|
||||
'16.0.0-nightly20210417bc31dc0e0f',
|
||||
'16.1.3'
|
||||
]);
|
||||
|
||||
findSpy.mockImplementation(() => {
|
||||
throw new Error(errMsg);
|
||||
});
|
||||
|
||||
await main.run();
|
||||
|
||||
expect(cnSpy).toHaveBeenCalledWith('::error::' + errMsg + osm.EOL);
|
||||
});
|
||||
|
||||
it('falls back to a version from node dist', async () => {
|
||||
os.platform = 'linux';
|
||||
os.arch = 'x64';
|
||||
|
||||
// a version which is not in the manifest but is in node dist
|
||||
const versionSpec = '13.13.1-nightly20200415947ddec091';
|
||||
|
||||
inputs['node-version'] = versionSpec;
|
||||
inputs['always-auth'] = false;
|
||||
inputs['token'] = 'faketoken';
|
||||
|
||||
// ... but not in the local cache
|
||||
findSpy.mockImplementation(() => '');
|
||||
findAllVersionsSpy.mockImplementation(() => []);
|
||||
|
||||
dlSpy.mockImplementation(async () => '/some/temp/path');
|
||||
const toolPath = path.normalize(
|
||||
'/cache/node/13.13.1-nightly20200415947ddec091/x64'
|
||||
);
|
||||
exSpy.mockImplementation(async () => '/some/other/temp/path');
|
||||
cacheSpy.mockImplementation(async () => toolPath);
|
||||
|
||||
await main.run();
|
||||
|
||||
const expPath = path.join(toolPath, 'bin');
|
||||
|
||||
expect(dlSpy).toHaveBeenCalled();
|
||||
expect(exSpy).toHaveBeenCalled();
|
||||
expect(cnSpy).toHaveBeenCalledWith(`::add-path::${expPath}${osm.EOL}`);
|
||||
});
|
||||
|
||||
it('windows: falls back to exe version if not in manifest and not in node dist', async () => {
|
||||
os.platform = 'win32';
|
||||
os.arch = 'x64';
|
||||
|
||||
// a version which is not in the manifest but is in node dist
|
||||
const versionSpec = '13.13.1-nightly20200415947ddec091';
|
||||
|
||||
const workingUrls = [
|
||||
`https://nodejs.org/download/nightly/v${versionSpec}/win-x64/node.exe`,
|
||||
`https://nodejs.org/download/nightly/v${versionSpec}/win-x64/node.lib`
|
||||
];
|
||||
|
||||
inputs['node-version'] = versionSpec;
|
||||
inputs['always-auth'] = false;
|
||||
inputs['token'] = 'faketoken';
|
||||
|
||||
// ... but not in the local cache
|
||||
findSpy.mockImplementation(() => '');
|
||||
findAllVersionsSpy.mockImplementation(() => []);
|
||||
|
||||
dlSpy.mockImplementation(async url => {
|
||||
if (workingUrls.includes(url)) {
|
||||
return '/some/temp/path';
|
||||
}
|
||||
|
||||
throw new tc.HTTPError(404);
|
||||
});
|
||||
const toolPath = path.normalize(
|
||||
'/cache/node/13.13.1-nightly20200415947ddec091/x64'
|
||||
);
|
||||
cacheSpy.mockImplementation(async () => toolPath);
|
||||
mkdirpSpy.mockImplementation(async () => {});
|
||||
cpSpy.mockImplementation(async () => {});
|
||||
|
||||
await main.run();
|
||||
|
||||
workingUrls.forEach(url => {
|
||||
expect(dlSpy).toHaveBeenCalledWith(url);
|
||||
});
|
||||
expect(cnSpy).toHaveBeenCalledWith(`::add-path::${toolPath}${osm.EOL}`);
|
||||
});
|
||||
|
||||
it('linux: does not fall back to exe version if not in manifest and not in node dist', async () => {
|
||||
os.platform = 'linux';
|
||||
os.arch = 'x64';
|
||||
|
||||
// a version which is not in the manifest but is in node dist
|
||||
const versionSpec = '13.13.1-nightly20200415947ddec091';
|
||||
|
||||
const workingUrls = [
|
||||
`https://nodejs.org/download/nightly/v${versionSpec}/win-x64/node.exe`,
|
||||
`https://nodejs.org/download/nightly/v${versionSpec}/win-x64/node.lib`
|
||||
];
|
||||
|
||||
inputs['node-version'] = versionSpec;
|
||||
inputs['always-auth'] = false;
|
||||
inputs['token'] = 'faketoken';
|
||||
|
||||
// ... but not in the local cache
|
||||
findSpy.mockImplementation(() => '');
|
||||
findAllVersionsSpy.mockImplementation(() => []);
|
||||
|
||||
dlSpy.mockImplementation(async url => {
|
||||
if (workingUrls.includes(url)) {
|
||||
return '/some/temp/path';
|
||||
}
|
||||
|
||||
throw new tc.HTTPError(404);
|
||||
});
|
||||
const toolPath = path.normalize(
|
||||
'/cache/node/13.13.1-nightly20200415947ddec091/x64'
|
||||
);
|
||||
cacheSpy.mockImplementation(async () => toolPath);
|
||||
mkdirpSpy.mockImplementation(async () => {});
|
||||
cpSpy.mockImplementation(async () => {});
|
||||
|
||||
await main.run();
|
||||
|
||||
workingUrls.forEach(url => {
|
||||
expect(dlSpy).not.toHaveBeenCalledWith(url);
|
||||
});
|
||||
expect(cnSpy).toHaveBeenCalledWith(
|
||||
`::error::Unexpected HTTP response: 404${osm.EOL}`
|
||||
);
|
||||
});
|
||||
|
||||
it('does not find a version that does not exist', async () => {
|
||||
os.platform = 'linux';
|
||||
os.arch = 'x64';
|
||||
|
||||
const versionSpec = '10.13.1-nightly20200415947ddec091';
|
||||
inputs['node-version'] = versionSpec;
|
||||
|
||||
findSpy.mockImplementation(() => '');
|
||||
findAllVersionsSpy.mockImplementation(() => []);
|
||||
await main.run();
|
||||
|
||||
expect(cnSpy).toHaveBeenCalledWith(
|
||||
`::error::Unable to find Node version '${versionSpec}' for platform ${os.platform} and architecture ${os.arch}.${osm.EOL}`
|
||||
);
|
||||
});
|
||||
|
||||
it('reports a failed download', async () => {
|
||||
const errMsg = 'unhandled download message';
|
||||
os.platform = 'linux';
|
||||
os.arch = 'x64';
|
||||
|
||||
// a version which is in the manifest
|
||||
const versionSpec = '18.0.0-nightly202204180699150267';
|
||||
|
||||
inputs['node-version'] = versionSpec;
|
||||
inputs['always-auth'] = false;
|
||||
inputs['token'] = 'faketoken';
|
||||
|
||||
findSpy.mockImplementation(() => '');
|
||||
findAllVersionsSpy.mockImplementation(() => []);
|
||||
|
||||
dlSpy.mockImplementation(() => {
|
||||
throw new Error(errMsg);
|
||||
});
|
||||
await main.run();
|
||||
|
||||
expect(cnSpy).toHaveBeenCalledWith(`::error::${errMsg}${osm.EOL}`);
|
||||
});
|
||||
|
||||
it('acquires specified architecture of node', async () => {
|
||||
for (const {arch, version, osSpec} of [
|
||||
{
|
||||
arch: 'x86',
|
||||
version: '18.0.0-nightly202110204cb3e06ed8',
|
||||
osSpec: 'win32'
|
||||
},
|
||||
{
|
||||
arch: 'x86',
|
||||
version: '20.0.0-nightly2022101987cdf7d412',
|
||||
osSpec: 'win32'
|
||||
}
|
||||
]) {
|
||||
os.platform = osSpec;
|
||||
os.arch = arch;
|
||||
const fileExtension = os.platform === 'win32' ? '7z' : 'tar.gz';
|
||||
const platform = {
|
||||
linux: 'linux',
|
||||
darwin: 'darwin',
|
||||
win32: 'win'
|
||||
}[os.platform];
|
||||
|
||||
inputs['node-version'] = version;
|
||||
inputs['architecture'] = arch;
|
||||
inputs['always-auth'] = false;
|
||||
inputs['token'] = 'faketoken';
|
||||
|
||||
const expectedUrl = `https://nodejs.org/download/nightly/v${version}/node-v${version}-${platform}-${arch}.${fileExtension}`;
|
||||
|
||||
// ... but not in the local cache
|
||||
findSpy.mockImplementation(() => '');
|
||||
findAllVersionsSpy.mockImplementation(() => []);
|
||||
|
||||
dlSpy.mockImplementation(async () => '/some/temp/path');
|
||||
const toolPath = path.normalize(`/cache/node/${version}/${arch}`);
|
||||
exSpy.mockImplementation(async () => '/some/other/temp/path');
|
||||
cacheSpy.mockImplementation(async () => toolPath);
|
||||
|
||||
await main.run();
|
||||
expect(dlSpy).toHaveBeenCalled();
|
||||
expect(logSpy).toHaveBeenCalledWith(
|
||||
`Acquiring ${version} - ${arch} from ${expectedUrl}`
|
||||
);
|
||||
}
|
||||
}, 100000);
|
||||
|
||||
describe('nightly versions', () => {
|
||||
it.each([
|
||||
[
|
||||
'17.5.0-nightly',
|
||||
'17.5.0-nightly20220209e43808936a',
|
||||
'https://nodejs.org/download/nightly/v17.5.0-nightly20220209e43808936a/node-v17.5.0-nightly20220209e43808936a-linux-x64.tar.gz'
|
||||
],
|
||||
[
|
||||
'17-nightly',
|
||||
'17.5.0-nightly20220209e43808936a',
|
||||
'https://nodejs.org/download/nightly/v17.5.0-nightly20220209e43808936a/node-v17.5.0-nightly20220209e43808936a-linux-x64.tar.gz'
|
||||
],
|
||||
[
|
||||
'18.0.0-nightly',
|
||||
'18.0.0-nightly20220419bde889bd4e',
|
||||
'https://nodejs.org/download/nightly/v18.0.0-nightly20220419bde889bd4e/node-v18.0.0-nightly20220419bde889bd4e-linux-x64.tar.gz'
|
||||
],
|
||||
[
|
||||
'18-nightly',
|
||||
'18.0.0-nightly20220419bde889bd4e',
|
||||
'https://nodejs.org/download/nightly/v18.0.0-nightly20220419bde889bd4e/node-v18.0.0-nightly20220419bde889bd4e-linux-x64.tar.gz'
|
||||
],
|
||||
[
|
||||
'20.0.0-nightly',
|
||||
'20.0.0-nightly2022101987cdf7d412',
|
||||
'https://nodejs.org/download/nightly/v20.0.0-nightly2022101987cdf7d412/node-v20.0.0-nightly2022101987cdf7d412-linux-x64.tar.gz'
|
||||
]
|
||||
])(
|
||||
'finds the versions in the index.json and installs it',
|
||||
async (input, expectedVersion, expectedUrl) => {
|
||||
const toolPath = path.normalize(`/cache/node/${expectedVersion}/x64`);
|
||||
|
||||
findSpy.mockImplementation(() => '');
|
||||
findAllVersionsSpy.mockImplementation(() => []);
|
||||
dlSpy.mockImplementation(async () => '/some/temp/path');
|
||||
exSpy.mockImplementation(async () => '/some/other/temp/path');
|
||||
cacheSpy.mockImplementation(async () => toolPath);
|
||||
|
||||
inputs['node-version'] = input;
|
||||
os['arch'] = 'x64';
|
||||
os['platform'] = 'linux';
|
||||
// act
|
||||
await main.run();
|
||||
|
||||
// assert
|
||||
expect(logSpy).toHaveBeenCalledWith(
|
||||
`Acquiring ${expectedVersion} - ${os.arch} from ${expectedUrl}`
|
||||
);
|
||||
expect(logSpy).toHaveBeenCalledWith('Extracting ...');
|
||||
expect(logSpy).toHaveBeenCalledWith('Adding to the cache ...');
|
||||
expect(cnSpy).toHaveBeenCalledWith(
|
||||
`::add-path::${path.join(toolPath, 'bin')}${osm.EOL}`
|
||||
);
|
||||
}
|
||||
);
|
||||
|
||||
it.each([
|
||||
['17.5.0-nightly', '17.5.0-nightly20220209e43808936a'],
|
||||
['17-nightly', '17.5.0-nightly20220209e43808936a'],
|
||||
['20.0.0-nightly', '20.0.0-nightly2022101987cdf7d412']
|
||||
])(
|
||||
'finds the %s version in the hostedToolcache',
|
||||
async (input, expectedVersion) => {
|
||||
const toolPath = path.normalize(`/cache/node/${expectedVersion}/x64`);
|
||||
findSpy.mockReturnValue(toolPath);
|
||||
findAllVersionsSpy.mockReturnValue([
|
||||
'17.5.0-nightly20220209e43808936a',
|
||||
'17.5.0-nightly20220209e43808935a',
|
||||
'20.0.0-nightly2022101987cdf7d412',
|
||||
'20.0.0-nightly2022101987cdf7d411'
|
||||
]);
|
||||
|
||||
inputs['node-version'] = input;
|
||||
os['arch'] = 'x64';
|
||||
os['platform'] = 'linux';
|
||||
|
||||
// act
|
||||
await main.run();
|
||||
|
||||
// assert
|
||||
expect(findAllVersionsSpy).toHaveBeenCalled();
|
||||
expect(logSpy).toHaveBeenCalledWith(`Found in cache @ ${toolPath}`);
|
||||
expect(cnSpy).toHaveBeenCalledWith(
|
||||
`::add-path::${path.join(toolPath, 'bin')}${osm.EOL}`
|
||||
);
|
||||
}
|
||||
);
|
||||
|
||||
it.each([
|
||||
[
|
||||
'17.5.0-nightly',
|
||||
'17.5.0-nightly20220209e43808936a',
|
||||
'17.0.0-nightly202110193f11666dc7',
|
||||
'https://nodejs.org/download/nightly/v17.5.0-nightly20220209e43808936a/node-v17.5.0-nightly20220209e43808936a-linux-x64.tar.gz'
|
||||
],
|
||||
[
|
||||
'17-nightly',
|
||||
'17.5.0-nightly20220209e43808936a',
|
||||
'17.0.0-nightly202110193f11666dc7',
|
||||
'https://nodejs.org/download/nightly/v17.5.0-nightly20220209e43808936a/node-v17.5.0-nightly20220209e43808936a-linux-x64.tar.gz'
|
||||
],
|
||||
[
|
||||
'18.0.0-nightly',
|
||||
'18.0.0-nightly20220419bde889bd4e',
|
||||
'18.0.0-nightly202204180699150267',
|
||||
'https://nodejs.org/download/nightly/v18.0.0-nightly20220419bde889bd4e/node-v18.0.0-nightly20220419bde889bd4e-linux-x64.tar.gz'
|
||||
],
|
||||
[
|
||||
'18-nightly',
|
||||
'18.0.0-nightly20220419bde889bd4e',
|
||||
'18.0.0-nightly202204180699150267',
|
||||
'https://nodejs.org/download/nightly/v18.0.0-nightly20220419bde889bd4e/node-v18.0.0-nightly20220419bde889bd4e-linux-x64.tar.gz'
|
||||
],
|
||||
[
|
||||
'20.0.0-nightly',
|
||||
'20.0.0-nightly2022101987cdf7d412',
|
||||
'20.0.0-nightly2022101987cdf7d411',
|
||||
'https://nodejs.org/download/nightly/v20.0.0-nightly2022101987cdf7d412/node-v20.0.0-nightly2022101987cdf7d412-linux-x64.tar.gz'
|
||||
]
|
||||
])(
|
||||
'get %s version from dist if check-latest is true',
|
||||
async (input, expectedVersion, foundVersion, expectedUrl) => {
|
||||
const foundToolPath = path.normalize(`/cache/node/${foundVersion}/x64`);
|
||||
const toolPath = path.normalize(`/cache/node/${expectedVersion}/x64`);
|
||||
|
||||
inputs['node-version'] = input;
|
||||
inputs['check-latest'] = 'true';
|
||||
os['arch'] = 'x64';
|
||||
os['platform'] = 'linux';
|
||||
|
||||
findSpy.mockReturnValue(foundToolPath);
|
||||
findAllVersionsSpy.mockReturnValue([
|
||||
'17.0.0-nightly202110193f11666dc7',
|
||||
'18.0.0-nightly202204180699150267',
|
||||
'20.0.0-nightly2022101987cdf7d411'
|
||||
]);
|
||||
dlSpy.mockImplementation(async () => '/some/temp/path');
|
||||
exSpy.mockImplementation(async () => '/some/other/temp/path');
|
||||
cacheSpy.mockImplementation(async () => toolPath);
|
||||
|
||||
// act
|
||||
await main.run();
|
||||
|
||||
// assert
|
||||
expect(findAllVersionsSpy).toHaveBeenCalled();
|
||||
expect(logSpy).toHaveBeenCalledWith(
|
||||
`Acquiring ${expectedVersion} - ${os.arch} from ${expectedUrl}`
|
||||
);
|
||||
expect(logSpy).toHaveBeenCalledWith('Extracting ...');
|
||||
expect(logSpy).toHaveBeenCalledWith('Adding to the cache ...');
|
||||
expect(cnSpy).toHaveBeenCalledWith(
|
||||
`::add-path::${path.join(toolPath, 'bin')}${osm.EOL}`
|
||||
);
|
||||
}
|
||||
);
|
||||
});
|
||||
});
|
@ -1,31 +1,36 @@
|
||||
import * as core from '@actions/core';
|
||||
import * as io from '@actions/io';
|
||||
import * as tc from '@actions/tool-cache';
|
||||
import * as httpm from '@actions/http-client';
|
||||
import * as exec from '@actions/exec';
|
||||
import * as im from '../src/installer';
|
||||
import * as cache from '@actions/cache';
|
||||
import fs from 'fs';
|
||||
import cp from 'child_process';
|
||||
import osm = require('os');
|
||||
import osm from 'os';
|
||||
import path from 'path';
|
||||
import each from 'jest-each';
|
||||
import * as main from '../src/main';
|
||||
import * as auth from '../src/authutil';
|
||||
import OfficialBuilds from '../src/distributions/official_builds/official_builds';
|
||||
import {INodeVersion} from '../src/distributions/base-models';
|
||||
|
||||
let nodeTestManifest = require('./data/versions-manifest.json');
|
||||
let nodeTestDist = require('./data/node-dist-index.json');
|
||||
import nodeTestManifest from './data/versions-manifest.json';
|
||||
import nodeTestDist from './data/node-dist-index.json';
|
||||
import nodeTestDistNightly from './data/node-nightly-index.json';
|
||||
import nodeTestDistRc from './data/node-rc-index.json';
|
||||
import nodeV8CanaryTestDist from './data/v8-canary-dist-index.json';
|
||||
|
||||
describe('setup-node', () => {
|
||||
let build: OfficialBuilds;
|
||||
let inputs = {} as any;
|
||||
let os = {} as any;
|
||||
|
||||
let inSpy: jest.SpyInstance;
|
||||
let findSpy: jest.SpyInstance;
|
||||
let findAllVersionsSpy: jest.SpyInstance;
|
||||
let cnSpy: jest.SpyInstance;
|
||||
let logSpy: jest.SpyInstance;
|
||||
let warningSpy: jest.SpyInstance;
|
||||
let getManifestSpy: jest.SpyInstance;
|
||||
let getDistSpy: jest.SpyInstance;
|
||||
let platSpy: jest.SpyInstance;
|
||||
let archSpy: jest.SpyInstance;
|
||||
let dlSpy: jest.SpyInstance;
|
||||
@ -38,9 +43,9 @@ describe('setup-node', () => {
|
||||
let mkdirpSpy: jest.SpyInstance;
|
||||
let execSpy: jest.SpyInstance;
|
||||
let authSpy: jest.SpyInstance;
|
||||
let parseNodeVersionSpy: jest.SpyInstance;
|
||||
let isCacheActionAvailable: jest.SpyInstance;
|
||||
let getExecOutputSpy: jest.SpyInstance;
|
||||
let getJsonSpy: jest.SpyInstance;
|
||||
|
||||
beforeEach(() => {
|
||||
// @actions/core
|
||||
@ -61,12 +66,14 @@ describe('setup-node', () => {
|
||||
|
||||
// @actions/tool-cache
|
||||
findSpy = jest.spyOn(tc, 'find');
|
||||
findAllVersionsSpy = jest.spyOn(tc, 'findAllVersions');
|
||||
dlSpy = jest.spyOn(tc, 'downloadTool');
|
||||
exSpy = jest.spyOn(tc, 'extractTar');
|
||||
cacheSpy = jest.spyOn(tc, 'cacheDir');
|
||||
getManifestSpy = jest.spyOn(tc, 'getManifestFromRepo');
|
||||
getDistSpy = jest.spyOn(im, 'getVersionsFromDist');
|
||||
parseNodeVersionSpy = jest.spyOn(im, 'parseNodeVersionFile');
|
||||
|
||||
// http-client
|
||||
getJsonSpy = jest.spyOn(httpm.HttpClient.prototype, 'getJson');
|
||||
|
||||
// io
|
||||
whichSpy = jest.spyOn(io, 'which');
|
||||
@ -84,7 +91,19 @@ describe('setup-node', () => {
|
||||
getManifestSpy.mockImplementation(
|
||||
() => <tc.IToolRelease[]>nodeTestManifest
|
||||
);
|
||||
getDistSpy.mockImplementation(() => <im.INodeVersion>nodeTestDist);
|
||||
|
||||
getJsonSpy.mockImplementation(url => {
|
||||
let res: any;
|
||||
if (url.includes('/rc')) {
|
||||
res = <INodeVersion[]>nodeTestDistRc;
|
||||
} else if (url.includes('/nightly')) {
|
||||
res = <INodeVersion[]>nodeTestDistNightly;
|
||||
} else {
|
||||
res = <INodeVersion[]>nodeTestDist;
|
||||
}
|
||||
|
||||
return {result: res};
|
||||
});
|
||||
|
||||
// writes
|
||||
cnSpy = jest.spyOn(process.stdout, 'write');
|
||||
@ -93,11 +112,11 @@ describe('setup-node', () => {
|
||||
warningSpy = jest.spyOn(core, 'warning');
|
||||
cnSpy.mockImplementation(line => {
|
||||
// uncomment to debug
|
||||
// process.stderr.write('write:' + line + '\n');
|
||||
process.stderr.write('write:' + line + '\n');
|
||||
});
|
||||
logSpy.mockImplementation(line => {
|
||||
// uncomment to debug
|
||||
// process.stderr.write('log:' + line + '\n');
|
||||
// uncomment to debug
|
||||
process.stderr.write('log:' + line + '\n');
|
||||
});
|
||||
dbgSpy.mockImplementation(msg => {
|
||||
// uncomment to see debug output
|
||||
@ -105,7 +124,7 @@ describe('setup-node', () => {
|
||||
});
|
||||
warningSpy.mockImplementation(msg => {
|
||||
// uncomment to debug
|
||||
// process.stderr.write('log:' + line + '\n');
|
||||
// process.stderr.write('log:' + msg + '\n');
|
||||
});
|
||||
|
||||
// @actions/exec
|
||||
@ -127,22 +146,6 @@ describe('setup-node', () => {
|
||||
//--------------------------------------------------
|
||||
// Manifest find tests
|
||||
//--------------------------------------------------
|
||||
it('can mock manifest versions', async () => {
|
||||
let versions: tc.IToolRelease[] | null = await tc.getManifestFromRepo(
|
||||
'actions',
|
||||
'node-versions',
|
||||
'mocktoken'
|
||||
);
|
||||
expect(versions).toBeDefined();
|
||||
expect(versions?.length).toBe(7);
|
||||
});
|
||||
|
||||
it('can mock dist versions', async () => {
|
||||
let versions: im.INodeVersion[] = await im.getVersionsFromDist();
|
||||
expect(versions).toBeDefined();
|
||||
expect(versions?.length).toBe(23);
|
||||
});
|
||||
|
||||
it.each([
|
||||
['12.16.2', 'darwin', '12.16.2', 'Erbium'],
|
||||
['12', 'linux', '12.16.2', 'Erbium'],
|
||||
@ -153,13 +156,13 @@ describe('setup-node', () => {
|
||||
async (versionSpec, platform, expectedVersion, expectedLts) => {
|
||||
os.platform = platform;
|
||||
os.arch = 'x64';
|
||||
let versions: tc.IToolRelease[] | null = await tc.getManifestFromRepo(
|
||||
const versions: tc.IToolRelease[] | null = await tc.getManifestFromRepo(
|
||||
'actions',
|
||||
'node-versions',
|
||||
'mocktoken'
|
||||
);
|
||||
expect(versions).toBeDefined();
|
||||
let match = await tc.findFromManifest(versionSpec, true, versions);
|
||||
const match = await tc.findFromManifest(versionSpec, true, versions);
|
||||
expect(match).toBeDefined();
|
||||
expect(match?.version).toBe(expectedVersion);
|
||||
expect((match as any).lts).toBe(expectedLts);
|
||||
@ -174,7 +177,7 @@ describe('setup-node', () => {
|
||||
inputs['node-version'] = '12';
|
||||
inputs.stable = 'true';
|
||||
|
||||
let toolPath = path.normalize('/cache/node/12.16.1/x64');
|
||||
const toolPath = path.normalize('/cache/node/12.16.1/x64');
|
||||
findSpy.mockImplementation(() => toolPath);
|
||||
await main.run();
|
||||
|
||||
@ -186,7 +189,7 @@ describe('setup-node', () => {
|
||||
|
||||
inSpy.mockImplementation(name => inputs[name]);
|
||||
|
||||
let toolPath = path.normalize('/cache/node/12.16.1/x64');
|
||||
const toolPath = path.normalize('/cache/node/12.16.1/x64');
|
||||
findSpy.mockImplementation(() => toolPath);
|
||||
await main.run();
|
||||
|
||||
@ -198,16 +201,16 @@ describe('setup-node', () => {
|
||||
|
||||
inSpy.mockImplementation(name => inputs[name]);
|
||||
|
||||
let toolPath = path.normalize('/cache/node/12.16.1/x64');
|
||||
const toolPath = path.normalize('/cache/node/12.16.1/x64');
|
||||
findSpy.mockImplementation(() => toolPath);
|
||||
await main.run();
|
||||
|
||||
let expPath = path.join(toolPath, 'bin');
|
||||
const expPath = path.join(toolPath, 'bin');
|
||||
expect(cnSpy).toHaveBeenCalledWith(`::add-path::${expPath}${osm.EOL}`);
|
||||
});
|
||||
|
||||
it('handles unhandled find error and reports error', async () => {
|
||||
let errMsg = 'unhandled error message';
|
||||
const errMsg = 'unhandled error message';
|
||||
inputs['node-version'] = '12';
|
||||
|
||||
findSpy.mockImplementation(() => {
|
||||
@ -228,27 +231,27 @@ describe('setup-node', () => {
|
||||
os.arch = 'x64';
|
||||
|
||||
// a version which is in the manifest
|
||||
let versionSpec = '12.16.2';
|
||||
let resolvedVersion = versionSpec;
|
||||
const versionSpec = '12.16.2';
|
||||
const resolvedVersion = versionSpec;
|
||||
|
||||
inputs['node-version'] = versionSpec;
|
||||
inputs['always-auth'] = false;
|
||||
inputs['token'] = 'faketoken';
|
||||
|
||||
let expectedUrl =
|
||||
const expectedUrl =
|
||||
'https://github.com/actions/node-versions/releases/download/12.16.2-20200507.95/node-12.16.2-linux-x64.tar.gz';
|
||||
|
||||
// ... but not in the local cache
|
||||
findSpy.mockImplementation(() => '');
|
||||
|
||||
dlSpy.mockImplementation(async () => '/some/temp/path');
|
||||
let toolPath = path.normalize('/cache/node/12.16.2/x64');
|
||||
const toolPath = path.normalize('/cache/node/12.16.2/x64');
|
||||
exSpy.mockImplementation(async () => '/some/other/temp/path');
|
||||
cacheSpy.mockImplementation(async () => toolPath);
|
||||
|
||||
await main.run();
|
||||
|
||||
let expPath = path.join(toolPath, 'bin');
|
||||
const expPath = path.join(toolPath, 'bin');
|
||||
|
||||
expect(getExecOutputSpy).toHaveBeenCalledWith(
|
||||
'node',
|
||||
@ -281,36 +284,33 @@ describe('setup-node', () => {
|
||||
os.arch = 'x64';
|
||||
|
||||
// a version which is not in the manifest but is in node dist
|
||||
let versionSpec = '11.15.0';
|
||||
let resolvedVersion = versionSpec;
|
||||
const versionSpec = '11.15.0';
|
||||
|
||||
inputs['node-version'] = versionSpec;
|
||||
inputs['always-auth'] = false;
|
||||
inputs['token'] = 'faketoken';
|
||||
|
||||
let expectedUrl =
|
||||
'https://github.com/actions/node-versions/releases/download/12.16.2-20200507.95/node-12.16.2-linux-x64.tar.gz';
|
||||
|
||||
// ... but not in the local cache
|
||||
findSpy.mockImplementation(() => '');
|
||||
|
||||
dlSpy.mockImplementation(async () => '/some/temp/path');
|
||||
let toolPath = path.normalize('/cache/node/11.11.0/x64');
|
||||
const toolPath = path.normalize('/cache/node/11.15.0/x64');
|
||||
exSpy.mockImplementation(async () => '/some/other/temp/path');
|
||||
cacheSpy.mockImplementation(async () => toolPath);
|
||||
|
||||
await main.run();
|
||||
|
||||
let expPath = path.join(toolPath, 'bin');
|
||||
const expPath = path.join(toolPath, 'bin');
|
||||
|
||||
expect(dlSpy).toHaveBeenCalled();
|
||||
expect(exSpy).toHaveBeenCalled();
|
||||
expect(logSpy).toHaveBeenCalledWith(
|
||||
'Not found in manifest. Falling back to download directly from Node'
|
||||
);
|
||||
expect(getManifestSpy).toHaveBeenCalled();
|
||||
expect(logSpy).toHaveBeenCalledWith(
|
||||
`Attempting to download ${versionSpec}...`
|
||||
);
|
||||
expect(logSpy).toHaveBeenCalledWith(
|
||||
'Not found in manifest. Falling back to download directly from Node'
|
||||
);
|
||||
expect(dlSpy).toHaveBeenCalled();
|
||||
expect(exSpy).toHaveBeenCalled();
|
||||
expect(cnSpy).toHaveBeenCalledWith(`::add-path::${expPath}${osm.EOL}`);
|
||||
});
|
||||
|
||||
@ -318,14 +318,14 @@ describe('setup-node', () => {
|
||||
os.platform = 'linux';
|
||||
os.arch = 'x64';
|
||||
|
||||
let versionSpec = '9.99.9';
|
||||
const versionSpec = '9.99.9';
|
||||
inputs['node-version'] = versionSpec;
|
||||
|
||||
findSpy.mockImplementation(() => '');
|
||||
await main.run();
|
||||
|
||||
expect(logSpy).toHaveBeenCalledWith(
|
||||
'Not found in manifest. Falling back to download directly from Node'
|
||||
'Not found in manifest. Falling back to download directly from Node'
|
||||
);
|
||||
expect(logSpy).toHaveBeenCalledWith(
|
||||
`Attempting to download ${versionSpec}...`
|
||||
@ -336,13 +336,13 @@ describe('setup-node', () => {
|
||||
});
|
||||
|
||||
it('reports a failed download', async () => {
|
||||
let errMsg = 'unhandled download message';
|
||||
const errMsg = 'unhandled download message';
|
||||
os.platform = 'linux';
|
||||
os.arch = 'x64';
|
||||
|
||||
// a version which is in the manifest
|
||||
let versionSpec = '12.16.2';
|
||||
let resolvedVersion = versionSpec;
|
||||
const versionSpec = '12.16.2';
|
||||
const resolvedVersion = versionSpec;
|
||||
|
||||
inputs['node-version'] = versionSpec;
|
||||
inputs['always-auth'] = false;
|
||||
@ -376,7 +376,7 @@ describe('setup-node', () => {
|
||||
inputs['always-auth'] = false;
|
||||
inputs['token'] = 'faketoken';
|
||||
|
||||
let expectedUrl =
|
||||
const expectedUrl =
|
||||
arch === 'x64'
|
||||
? `https://github.com/actions/node-versions/releases/download/${version}/node-${version}-${platform}-${arch}.zip`
|
||||
: `https://nodejs.org/dist/v${version}/node-v${version}-${platform}-${arch}.${fileExtension}`;
|
||||
@ -385,7 +385,7 @@ describe('setup-node', () => {
|
||||
findSpy.mockImplementation(() => '');
|
||||
|
||||
dlSpy.mockImplementation(async () => '/some/temp/path');
|
||||
let toolPath = path.normalize(`/cache/node/${version}/${arch}`);
|
||||
const toolPath = path.normalize(`/cache/node/${version}/${arch}`);
|
||||
exSpy.mockImplementation(async () => '/some/other/temp/path');
|
||||
cacheSpy.mockImplementation(async () => toolPath);
|
||||
|
||||
@ -481,7 +481,7 @@ describe('setup-node', () => {
|
||||
os.arch = 'x64';
|
||||
|
||||
// a version which is not in the manifest but is in node dist
|
||||
let versionSpec = '11';
|
||||
const versionSpec = '11';
|
||||
|
||||
inputs['node-version'] = versionSpec;
|
||||
inputs['check-latest'] = 'true';
|
||||
@ -492,13 +492,13 @@ describe('setup-node', () => {
|
||||
findSpy.mockImplementation(() => '');
|
||||
|
||||
dlSpy.mockImplementation(async () => '/some/temp/path');
|
||||
let toolPath = path.normalize('/cache/node/11.11.0/x64');
|
||||
const toolPath = path.normalize('/cache/node/11.11.0/x64');
|
||||
exSpy.mockImplementation(async () => '/some/other/temp/path');
|
||||
cacheSpy.mockImplementation(async () => toolPath);
|
||||
|
||||
await main.run();
|
||||
|
||||
let expPath = path.join(toolPath, 'bin');
|
||||
const expPath = path.join(toolPath, 'bin');
|
||||
|
||||
expect(dlSpy).toHaveBeenCalled();
|
||||
expect(exSpy).toHaveBeenCalled();
|
||||
@ -523,7 +523,7 @@ describe('setup-node', () => {
|
||||
os.arch = 'x64';
|
||||
|
||||
// a version which is not in the manifest but is in node dist
|
||||
let versionSpec = '12';
|
||||
const versionSpec = '12';
|
||||
|
||||
inputs['node-version'] = versionSpec;
|
||||
inputs['check-latest'] = 'true';
|
||||
@ -537,13 +537,13 @@ describe('setup-node', () => {
|
||||
});
|
||||
|
||||
dlSpy.mockImplementation(async () => '/some/temp/path');
|
||||
let toolPath = path.normalize('/cache/node/12.11.0/x64');
|
||||
const toolPath = path.normalize('/cache/node/12.11.0/x64');
|
||||
exSpy.mockImplementation(async () => '/some/other/temp/path');
|
||||
cacheSpy.mockImplementation(async () => toolPath);
|
||||
|
||||
await main.run();
|
||||
|
||||
let expPath = path.join(toolPath, 'bin');
|
||||
const expPath = path.join(toolPath, 'bin');
|
||||
|
||||
expect(dlSpy).toHaveBeenCalled();
|
||||
expect(exSpy).toHaveBeenCalled();
|
||||
@ -563,164 +563,6 @@ describe('setup-node', () => {
|
||||
});
|
||||
});
|
||||
|
||||
describe('node-version-file flag', () => {
|
||||
it('not used if node-version is provided', async () => {
|
||||
// Arrange
|
||||
inputs['node-version'] = '12';
|
||||
|
||||
// Act
|
||||
await main.run();
|
||||
|
||||
// Assert
|
||||
expect(parseNodeVersionSpy).toHaveBeenCalledTimes(0);
|
||||
});
|
||||
|
||||
it('not used if node-version-file not provided', async () => {
|
||||
// Act
|
||||
await main.run();
|
||||
|
||||
// Assert
|
||||
expect(parseNodeVersionSpy).toHaveBeenCalledTimes(0);
|
||||
});
|
||||
|
||||
it('reads node-version-file if provided', async () => {
|
||||
// Arrange
|
||||
const versionSpec = 'v14';
|
||||
const versionFile = '.nvmrc';
|
||||
const expectedVersionSpec = '14';
|
||||
process.env['GITHUB_WORKSPACE'] = path.join(__dirname, 'data');
|
||||
inputs['node-version-file'] = versionFile;
|
||||
|
||||
parseNodeVersionSpy.mockImplementation(() => expectedVersionSpec);
|
||||
existsSpy.mockImplementationOnce(
|
||||
input => input === path.join(__dirname, 'data', versionFile)
|
||||
);
|
||||
|
||||
// Act
|
||||
await main.run();
|
||||
|
||||
// Assert
|
||||
expect(existsSpy).toHaveBeenCalledTimes(1);
|
||||
expect(existsSpy).toHaveReturnedWith(true);
|
||||
expect(parseNodeVersionSpy).toHaveBeenCalledWith(versionSpec);
|
||||
expect(logSpy).toHaveBeenCalledWith(
|
||||
`Resolved ${versionFile} as ${expectedVersionSpec}`
|
||||
);
|
||||
});
|
||||
|
||||
it('reads package.json as node-version-file if provided', async () => {
|
||||
// Arrange
|
||||
const versionSpec = fs.readFileSync(
|
||||
path.join(__dirname, 'data/package.json'),
|
||||
'utf-8'
|
||||
);
|
||||
const versionFile = 'package.json';
|
||||
const expectedVersionSpec = '14';
|
||||
process.env['GITHUB_WORKSPACE'] = path.join(__dirname, 'data');
|
||||
inputs['node-version-file'] = versionFile;
|
||||
|
||||
parseNodeVersionSpy.mockImplementation(() => expectedVersionSpec);
|
||||
existsSpy.mockImplementationOnce(
|
||||
input => input === path.join(__dirname, 'data', versionFile)
|
||||
);
|
||||
// Act
|
||||
await main.run();
|
||||
|
||||
// Assert
|
||||
expect(existsSpy).toHaveBeenCalledTimes(1);
|
||||
expect(existsSpy).toHaveReturnedWith(true);
|
||||
expect(parseNodeVersionSpy).toHaveBeenCalledWith(versionSpec);
|
||||
expect(logSpy).toHaveBeenCalledWith(
|
||||
`Resolved ${versionFile} as ${expectedVersionSpec}`
|
||||
);
|
||||
});
|
||||
|
||||
it('both node-version-file and node-version are provided', async () => {
|
||||
inputs['node-version'] = '12';
|
||||
const versionSpec = 'v14';
|
||||
const versionFile = '.nvmrc';
|
||||
const expectedVersionSpec = '14';
|
||||
process.env['GITHUB_WORKSPACE'] = path.join(__dirname, '..');
|
||||
inputs['node-version-file'] = versionFile;
|
||||
|
||||
parseNodeVersionSpy.mockImplementation(() => expectedVersionSpec);
|
||||
|
||||
// Act
|
||||
await main.run();
|
||||
|
||||
// Assert
|
||||
expect(existsSpy).toHaveBeenCalledTimes(0);
|
||||
expect(parseNodeVersionSpy).not.toHaveBeenCalled();
|
||||
expect(warningSpy).toHaveBeenCalledWith(
|
||||
'Both node-version and node-version-file inputs are specified, only node-version will be used'
|
||||
);
|
||||
});
|
||||
|
||||
it('should throw an error if node-version-file is not found', async () => {
|
||||
const versionFile = '.nvmrc';
|
||||
const versionFilePath = path.join(__dirname, '..', versionFile);
|
||||
inputs['node-version-file'] = versionFile;
|
||||
|
||||
inSpy.mockImplementation(name => inputs[name]);
|
||||
existsSpy.mockImplementationOnce(
|
||||
input => input === path.join(__dirname, 'data', versionFile)
|
||||
);
|
||||
|
||||
// Act
|
||||
await main.run();
|
||||
|
||||
// Assert
|
||||
expect(existsSpy).toHaveBeenCalled();
|
||||
expect(existsSpy).toHaveReturnedWith(false);
|
||||
expect(parseNodeVersionSpy).not.toHaveBeenCalled();
|
||||
expect(cnSpy).toHaveBeenCalledWith(
|
||||
`::error::The specified node version file at: ${versionFilePath} does not exist${osm.EOL}`
|
||||
);
|
||||
});
|
||||
});
|
||||
|
||||
describe('cache on GHES', () => {
|
||||
it('Should throw an error, because cache is not supported', async () => {
|
||||
inputs['node-version'] = '12';
|
||||
inputs['cache'] = 'npm';
|
||||
|
||||
inSpy.mockImplementation(name => inputs[name]);
|
||||
|
||||
let toolPath = path.normalize('/cache/node/12.16.1/x64');
|
||||
findSpy.mockImplementation(() => toolPath);
|
||||
|
||||
// expect(logSpy).toHaveBeenCalledWith(`Found in cache @ ${toolPath}`);
|
||||
process.env['GITHUB_SERVER_URL'] = 'https://www.test.com';
|
||||
isCacheActionAvailable.mockImplementation(() => false);
|
||||
|
||||
await main.run();
|
||||
|
||||
expect(cnSpy).toHaveBeenCalledWith(
|
||||
`::error::Cache action is only supported on GHES version >= 3.5. If you are on version >=3.5 Please check with GHES admin if Actions cache service is enabled or not.${osm.EOL}`
|
||||
);
|
||||
});
|
||||
|
||||
it('Should throw an internal error', async () => {
|
||||
inputs['node-version'] = '12';
|
||||
inputs['cache'] = 'npm';
|
||||
|
||||
inSpy.mockImplementation(name => inputs[name]);
|
||||
|
||||
let toolPath = path.normalize('/cache/node/12.16.1/x64');
|
||||
findSpy.mockImplementation(() => toolPath);
|
||||
|
||||
// expect(logSpy).toHaveBeenCalledWith(`Found in cache @ ${toolPath}`);
|
||||
process.env['GITHUB_SERVER_URL'] = '';
|
||||
isCacheActionAvailable.mockImplementation(() => false);
|
||||
|
||||
await main.run();
|
||||
|
||||
expect(warningSpy).toHaveBeenCalledWith(
|
||||
'The runner was not able to contact the cache service. Caching will be skipped'
|
||||
);
|
||||
});
|
||||
});
|
||||
|
||||
describe('LTS version', () => {
|
||||
beforeEach(() => {
|
||||
os.platform = 'linux';
|
||||
@ -936,36 +778,16 @@ describe('setup-node', () => {
|
||||
const toolPath = path.normalize(
|
||||
`/cache/node/${expectedVersion.version}/x64`
|
||||
);
|
||||
findSpy.mockReturnValue(toolPath);
|
||||
findSpy.mockImplementation(() => toolPath);
|
||||
|
||||
// Act
|
||||
await main.run();
|
||||
|
||||
// assert
|
||||
expect(logSpy).toHaveBeenCalledWith(`Found in cache @ ${toolPath}`);
|
||||
|
||||
expect(logSpy).toHaveBeenCalledWith('getting latest node version...');
|
||||
expect(logSpy).toHaveBeenCalledWith(`Found in cache @ ${toolPath}`);
|
||||
}
|
||||
);
|
||||
});
|
||||
});
|
||||
|
||||
describe('helper methods', () => {
|
||||
describe('parseNodeVersionFile', () => {
|
||||
each`
|
||||
contents | expected
|
||||
${'12'} | ${'12'}
|
||||
${'12.3'} | ${'12.3'}
|
||||
${'12.3.4'} | ${'12.3.4'}
|
||||
${'v12.3.4'} | ${'12.3.4'}
|
||||
${'lts/erbium'} | ${'lts/erbium'}
|
||||
${'lts/*'} | ${'lts/*'}
|
||||
${'nodejs 12.3.4'} | ${'12.3.4'}
|
||||
${'ruby 2.3.4\nnodejs 12.3.4\npython 3.4.5'} | ${'12.3.4'}
|
||||
${''} | ${''}
|
||||
${'unknown format'} | ${'unknown format'}
|
||||
`.it('parses "$contents"', ({contents, expected}) => {
|
||||
expect(im.parseNodeVersionFile(contents)).toBe(expected);
|
||||
});
|
||||
});
|
||||
});
|
59
__tests__/prepare-yarn-subprojects.sh
Executable file
@ -0,0 +1,59 @@
|
||||
#!/bin/sh -e
|
||||
export YARN_ENABLE_IMMUTABLE_INSTALLS=false
|
||||
rm package.json
|
||||
rm package-lock.json
|
||||
echo "create yarn2 project in the sub2"
|
||||
mkdir sub2
|
||||
cd sub2
|
||||
cat <<EOT >package.json
|
||||
{
|
||||
"name": "subproject",
|
||||
"dependencies": {
|
||||
"random": "^3.0.6",
|
||||
"uuid": "^9.0.0"
|
||||
}
|
||||
}
|
||||
EOT
|
||||
yarn set version 2.4.3
|
||||
yarn install
|
||||
|
||||
echo "create yarn3 project in the sub3"
|
||||
cd ..
|
||||
mkdir sub3
|
||||
cd sub3
|
||||
cat <<EOT >package.json
|
||||
{
|
||||
"name": "subproject",
|
||||
"dependencies": {
|
||||
"random": "^3.0.6",
|
||||
"uuid": "^9.0.0"
|
||||
}
|
||||
}
|
||||
EOT
|
||||
yarn set version 3.5.1
|
||||
yarn install
|
||||
if [ x$1 = 'xglobal' ];then
|
||||
echo enableGlobalCache
|
||||
echo 'enableGlobalCache: true' >> .yarnrc.yml
|
||||
fi
|
||||
|
||||
cd ..
|
||||
if [ x$1 != 'xkeepcache' -a x$2 != 'xkeepcache' ]; then
|
||||
rm -rf sub2/.yarn/cache
|
||||
rm -rf sub3/.yarn/cache
|
||||
fi
|
||||
|
||||
if [ x$1 = 'xyarn1' ];then
|
||||
echo "create yarn1 project in the root"
|
||||
cat <<EOT >package.json
|
||||
{
|
||||
"name": "subproject",
|
||||
"dependencies": {
|
||||
"random": "^3.0.6",
|
||||
"uuid": "^9.0.0"
|
||||
}
|
||||
}
|
||||
EOT
|
||||
yarn set version 1.22.19
|
||||
yarn install
|
||||
fi
|
@ -1,10 +1,12 @@
|
||||
import tscMatcher from '../.github/tsc.json';
|
||||
|
||||
describe('problem matcher tests', () => {
|
||||
it('tsc: matches TypeScript "pretty" error message', () => {
|
||||
const [
|
||||
{
|
||||
pattern: [{regexp}]
|
||||
}
|
||||
] = require('../.github/tsc.json').problemMatcher;
|
||||
] = tscMatcher.problemMatcher;
|
||||
const exampleErrorMessage =
|
||||
"lib/index.js:23:42 - error TS2345: Argument of type 'A' is not assignable to parameter of type 'B'.";
|
||||
|
||||
@ -25,7 +27,7 @@ describe('problem matcher tests', () => {
|
||||
{
|
||||
pattern: [{regexp}]
|
||||
}
|
||||
] = require('../.github/tsc.json').problemMatcher;
|
||||
] = tscMatcher.problemMatcher;
|
||||
const exampleErrorMessage =
|
||||
"lib/index.js(23,42): error TS2345: Argument of type 'A' is not assignable to parameter of type 'B'.";
|
||||
|
||||
|
402
__tests__/rc-installer.test.ts
Normal file
@ -0,0 +1,402 @@
|
||||
import * as core from '@actions/core';
|
||||
import * as io from '@actions/io';
|
||||
import * as tc from '@actions/tool-cache';
|
||||
import * as httpm from '@actions/http-client';
|
||||
import * as exec from '@actions/exec';
|
||||
import * as cache from '@actions/cache';
|
||||
import fs from 'fs';
|
||||
import cp from 'child_process';
|
||||
import osm from 'os';
|
||||
import path from 'path';
|
||||
import * as main from '../src/main';
|
||||
import * as auth from '../src/authutil';
|
||||
import {INodeVersion} from '../src/distributions/base-models';
|
||||
|
||||
import nodeTestDist from './data/node-dist-index.json';
|
||||
import nodeTestDistNightly from './data/node-nightly-index.json';
|
||||
import nodeTestDistRc from './data/node-rc-index.json';
|
||||
import nodeV8CanaryTestDist from './data/v8-canary-dist-index.json';
|
||||
|
||||
describe('setup-node', () => {
|
||||
let inputs = {} as any;
|
||||
let os = {} as any;
|
||||
|
||||
let inSpy: jest.SpyInstance;
|
||||
let findSpy: jest.SpyInstance;
|
||||
let findAllVersionsSpy: jest.SpyInstance;
|
||||
let cnSpy: jest.SpyInstance;
|
||||
let logSpy: jest.SpyInstance;
|
||||
let warningSpy: jest.SpyInstance;
|
||||
let platSpy: jest.SpyInstance;
|
||||
let archSpy: jest.SpyInstance;
|
||||
let dlSpy: jest.SpyInstance;
|
||||
let exSpy: jest.SpyInstance;
|
||||
let cacheSpy: jest.SpyInstance;
|
||||
let dbgSpy: jest.SpyInstance;
|
||||
let whichSpy: jest.SpyInstance;
|
||||
let existsSpy: jest.SpyInstance;
|
||||
let mkdirpSpy: jest.SpyInstance;
|
||||
let execSpy: jest.SpyInstance;
|
||||
let authSpy: jest.SpyInstance;
|
||||
let isCacheActionAvailable: jest.SpyInstance;
|
||||
let getExecOutputSpy: jest.SpyInstance;
|
||||
let getJsonSpy: jest.SpyInstance;
|
||||
|
||||
beforeEach(() => {
|
||||
// @actions/core
|
||||
console.log('::stop-commands::stoptoken'); // Disable executing of runner commands when running tests in actions
|
||||
process.env['GITHUB_PATH'] = ''; // Stub out ENV file functionality so we can verify it writes to standard out
|
||||
process.env['GITHUB_OUTPUT'] = ''; // Stub out ENV file functionality so we can verify it writes to standard out
|
||||
inputs = {};
|
||||
inSpy = jest.spyOn(core, 'getInput');
|
||||
inSpy.mockImplementation(name => inputs[name]);
|
||||
|
||||
// node
|
||||
os = {};
|
||||
platSpy = jest.spyOn(osm, 'platform');
|
||||
platSpy.mockImplementation(() => os['platform']);
|
||||
archSpy = jest.spyOn(osm, 'arch');
|
||||
archSpy.mockImplementation(() => os['arch']);
|
||||
execSpy = jest.spyOn(cp, 'execSync');
|
||||
|
||||
// @actions/tool-cache
|
||||
findSpy = jest.spyOn(tc, 'find');
|
||||
findAllVersionsSpy = jest.spyOn(tc, 'findAllVersions');
|
||||
dlSpy = jest.spyOn(tc, 'downloadTool');
|
||||
exSpy = jest.spyOn(tc, 'extractTar');
|
||||
cacheSpy = jest.spyOn(tc, 'cacheDir');
|
||||
// getDistSpy = jest.spyOn(im, 'getVersionsFromDist');
|
||||
|
||||
// http-client
|
||||
getJsonSpy = jest.spyOn(httpm.HttpClient.prototype, 'getJson');
|
||||
|
||||
// io
|
||||
whichSpy = jest.spyOn(io, 'which');
|
||||
existsSpy = jest.spyOn(fs, 'existsSync');
|
||||
mkdirpSpy = jest.spyOn(io, 'mkdirP');
|
||||
|
||||
// @actions/tool-cache
|
||||
isCacheActionAvailable = jest.spyOn(cache, 'isFeatureAvailable');
|
||||
isCacheActionAvailable.mockImplementation(() => false);
|
||||
|
||||
// disable authentication portion for installer tests
|
||||
authSpy = jest.spyOn(auth, 'configAuthentication');
|
||||
authSpy.mockImplementation(() => {});
|
||||
|
||||
getJsonSpy.mockImplementation(url => {
|
||||
let res: any;
|
||||
if (url.includes('/rc')) {
|
||||
res = <INodeVersion[]>nodeTestDistRc;
|
||||
} else if (url.includes('/nightly')) {
|
||||
res = <INodeVersion[]>nodeTestDistNightly;
|
||||
} else {
|
||||
res = <INodeVersion[]>nodeTestDist;
|
||||
}
|
||||
|
||||
return {result: res};
|
||||
});
|
||||
|
||||
// writes
|
||||
cnSpy = jest.spyOn(process.stdout, 'write');
|
||||
logSpy = jest.spyOn(core, 'info');
|
||||
dbgSpy = jest.spyOn(core, 'debug');
|
||||
warningSpy = jest.spyOn(core, 'warning');
|
||||
cnSpy.mockImplementation(line => {
|
||||
// uncomment to debug
|
||||
// process.stderr.write('write:' + line + '\n');
|
||||
});
|
||||
logSpy.mockImplementation(line => {
|
||||
// uncomment to debug
|
||||
// process.stderr.write('log:' + line + '\n');
|
||||
});
|
||||
dbgSpy.mockImplementation(msg => {
|
||||
// uncomment to see debug output
|
||||
// process.stderr.write(msg + '\n');
|
||||
});
|
||||
warningSpy.mockImplementation(msg => {
|
||||
// uncomment to debug
|
||||
// process.stderr.write('log:' + msg + '\n');
|
||||
});
|
||||
|
||||
// @actions/exec
|
||||
getExecOutputSpy = jest.spyOn(exec, 'getExecOutput');
|
||||
getExecOutputSpy.mockImplementation(() => 'v16.15.0-rc.1');
|
||||
});
|
||||
|
||||
afterEach(() => {
|
||||
jest.resetAllMocks();
|
||||
jest.clearAllMocks();
|
||||
//jest.restoreAllMocks();
|
||||
});
|
||||
|
||||
afterAll(async () => {
|
||||
console.log('::stoptoken::'); // Re-enable executing of runner commands when running tests in actions
|
||||
jest.restoreAllMocks();
|
||||
}, 100000);
|
||||
|
||||
//--------------------------------------------------
|
||||
// Found in cache tests
|
||||
//--------------------------------------------------
|
||||
|
||||
it('finds version in cache with stable true', async () => {
|
||||
inputs['node-version'] = '12.0.0-rc.1';
|
||||
inputs.stable = 'true';
|
||||
|
||||
const toolPath = path.normalize('/cache/node/12.0.0-rc.1/x64');
|
||||
findSpy.mockImplementation(() => toolPath);
|
||||
await main.run();
|
||||
|
||||
expect(logSpy).toHaveBeenCalledWith(`Found in cache @ ${toolPath}`);
|
||||
});
|
||||
|
||||
it('finds version in cache with stable not supplied', async () => {
|
||||
inputs['node-version'] = '12.0.0-rc.1';
|
||||
|
||||
inSpy.mockImplementation(name => inputs[name]);
|
||||
|
||||
const toolPath = path.normalize('/cache/node/12.0.0-rc.1/x64');
|
||||
findSpy.mockImplementation(() => toolPath);
|
||||
await main.run();
|
||||
|
||||
expect(logSpy).toHaveBeenCalledWith(`Found in cache @ ${toolPath}`);
|
||||
});
|
||||
|
||||
it('finds version in cache and adds it to the path', async () => {
|
||||
inputs['node-version'] = '12.0.0-rc.1';
|
||||
|
||||
inSpy.mockImplementation(name => inputs[name]);
|
||||
|
||||
const toolPath = path.normalize('/cache/node/12.0.0-rc.1/x64');
|
||||
findSpy.mockImplementation(() => toolPath);
|
||||
await main.run();
|
||||
|
||||
const expPath = path.join(toolPath, 'bin');
|
||||
expect(cnSpy).toHaveBeenCalledWith(`::add-path::${expPath}${osm.EOL}`);
|
||||
});
|
||||
|
||||
it('handles unhandled find error and reports error', async () => {
|
||||
const errMsg = 'unhandled error message';
|
||||
inputs['node-version'] = '12.0.0-rc.1';
|
||||
|
||||
findSpy.mockImplementation(() => {
|
||||
throw new Error(errMsg);
|
||||
});
|
||||
|
||||
await main.run();
|
||||
|
||||
expect(cnSpy).toHaveBeenCalledWith('::error::' + errMsg + osm.EOL);
|
||||
});
|
||||
|
||||
it('falls back to a version from node dist', async () => {
|
||||
os.platform = 'linux';
|
||||
os.arch = 'x64';
|
||||
|
||||
const versionSpec = '13.0.0-rc.0';
|
||||
|
||||
inputs['node-version'] = versionSpec;
|
||||
inputs['always-auth'] = false;
|
||||
inputs['token'] = 'faketoken';
|
||||
|
||||
// ... but not in the local cache
|
||||
findSpy.mockImplementation(() => '');
|
||||
|
||||
dlSpy.mockImplementation(async () => '/some/temp/path');
|
||||
const toolPath = path.normalize('/cache/node/13.0.0-rc.0/x64');
|
||||
exSpy.mockImplementation(async () => '/some/other/temp/path');
|
||||
cacheSpy.mockImplementation(async () => toolPath);
|
||||
|
||||
await main.run();
|
||||
|
||||
const expPath = path.join(toolPath, 'bin');
|
||||
|
||||
expect(dlSpy).toHaveBeenCalled();
|
||||
expect(exSpy).toHaveBeenCalled();
|
||||
expect(logSpy).toHaveBeenCalledWith('Extracting ...');
|
||||
expect(logSpy).toHaveBeenCalledWith('Done');
|
||||
expect(cnSpy).toHaveBeenCalledWith(`::add-path::${expPath}${osm.EOL}`);
|
||||
});
|
||||
|
||||
it('does not find a version that does not exist', async () => {
|
||||
os.platform = 'linux';
|
||||
os.arch = 'x64';
|
||||
|
||||
const versionSpec = '9.99.9-rc.1';
|
||||
inputs['node-version'] = versionSpec;
|
||||
|
||||
findSpy.mockImplementation(() => '');
|
||||
await main.run();
|
||||
|
||||
expect(cnSpy).toHaveBeenCalledWith(
|
||||
`::error::Unable to find Node version '${versionSpec}' for platform ${os.platform} and architecture ${os.arch}.${osm.EOL}`
|
||||
);
|
||||
});
|
||||
|
||||
it('reports a failed download', async () => {
|
||||
const errMsg = 'unhandled download message';
|
||||
os.platform = 'linux';
|
||||
os.arch = 'x64';
|
||||
|
||||
const versionSpec = '14.7.0-rc.1';
|
||||
|
||||
inputs['node-version'] = versionSpec;
|
||||
inputs['always-auth'] = false;
|
||||
inputs['token'] = 'faketoken';
|
||||
|
||||
findSpy.mockImplementation(() => '');
|
||||
findAllVersionsSpy.mockImplementation(() => []);
|
||||
dlSpy.mockImplementation(() => {
|
||||
throw new Error(errMsg);
|
||||
});
|
||||
await main.run();
|
||||
|
||||
expect(cnSpy).toHaveBeenCalledWith(`::error::${errMsg}${osm.EOL}`);
|
||||
});
|
||||
|
||||
it('acquires specified architecture of node', async () => {
|
||||
for (const {arch, version, osSpec} of [
|
||||
{arch: 'x86', version: '13.4.0-rc.0', osSpec: 'win32'},
|
||||
{arch: 'x86', version: '14.15.5-rc.0', osSpec: 'win32'}
|
||||
]) {
|
||||
os.platform = osSpec;
|
||||
os.arch = arch;
|
||||
const fileExtension = os.platform === 'win32' ? '7z' : 'tar.gz';
|
||||
const platform = {
|
||||
linux: 'linux',
|
||||
darwin: 'darwin',
|
||||
win32: 'win'
|
||||
}[os.platform];
|
||||
|
||||
inputs['node-version'] = version;
|
||||
inputs['architecture'] = arch;
|
||||
inputs['always-auth'] = false;
|
||||
inputs['token'] = 'faketoken';
|
||||
|
||||
const expectedUrl = `https://nodejs.org/download/rc/v${version}/node-v${version}-${platform}-${arch}.${fileExtension}`;
|
||||
|
||||
// ... but not in the local cache
|
||||
findSpy.mockImplementation(() => '');
|
||||
findAllVersionsSpy.mockImplementation(() => []);
|
||||
|
||||
dlSpy.mockImplementation(async () => '/some/temp/path');
|
||||
const toolPath = path.normalize(`/cache/node/${version}/${arch}`);
|
||||
exSpy.mockImplementation(async () => '/some/other/temp/path');
|
||||
cacheSpy.mockImplementation(async () => toolPath);
|
||||
|
||||
await main.run();
|
||||
expect(dlSpy).toHaveBeenCalled();
|
||||
expect(logSpy).toHaveBeenCalledWith(
|
||||
`Acquiring ${version} - ${arch} from ${expectedUrl}`
|
||||
);
|
||||
}
|
||||
}, 100000);
|
||||
|
||||
describe('rc versions', () => {
|
||||
it.each([
|
||||
[
|
||||
'13.10.1-rc.0',
|
||||
'13.10.1-rc.0',
|
||||
'https://nodejs.org/download/rc/v13.10.1-rc.0/node-v13.10.1-rc.0-linux-x64.tar.gz'
|
||||
],
|
||||
[
|
||||
'14.15.5-rc.1',
|
||||
'14.15.5-rc.1',
|
||||
'https://nodejs.org/download/rc/v14.15.5-rc.1/node-v14.15.5-rc.1-linux-x64.tar.gz'
|
||||
],
|
||||
[
|
||||
'16.17.0-rc.1',
|
||||
'16.17.0-rc.1',
|
||||
'https://nodejs.org/download/rc/v16.17.0-rc.1/node-v16.17.0-rc.1-linux-x64.tar.gz'
|
||||
],
|
||||
[
|
||||
'17.0.0-rc.1',
|
||||
'17.0.0-rc.1',
|
||||
'https://nodejs.org/download/rc/v17.0.0-rc.1/node-v17.0.0-rc.1-linux-x64.tar.gz'
|
||||
],
|
||||
[
|
||||
'19.0.0-rc.2',
|
||||
'19.0.0-rc.2',
|
||||
'https://nodejs.org/download/rc/v19.0.0-rc.2/node-v19.0.0-rc.2-linux-x64.tar.gz'
|
||||
]
|
||||
])(
|
||||
'finds the versions in the index.json and installs it',
|
||||
async (input, expectedVersion, expectedUrl) => {
|
||||
const toolPath = path.normalize(`/cache/node/${expectedVersion}/x64`);
|
||||
|
||||
findSpy.mockImplementation(() => '');
|
||||
findAllVersionsSpy.mockImplementation(() => []);
|
||||
dlSpy.mockImplementation(async () => '/some/temp/path');
|
||||
exSpy.mockImplementation(async () => '/some/other/temp/path');
|
||||
cacheSpy.mockImplementation(async () => toolPath);
|
||||
|
||||
inputs['node-version'] = input;
|
||||
os['arch'] = 'x64';
|
||||
os['platform'] = 'linux';
|
||||
// act
|
||||
await main.run();
|
||||
|
||||
// assert
|
||||
expect(logSpy).toHaveBeenCalledWith('Extracting ...');
|
||||
expect(logSpy).toHaveBeenCalledWith('Adding to the cache ...');
|
||||
expect(cnSpy).toHaveBeenCalledWith(
|
||||
`::add-path::${path.join(toolPath, 'bin')}${osm.EOL}`
|
||||
);
|
||||
}
|
||||
);
|
||||
|
||||
it.each([
|
||||
['13.10.1-rc.0', '13.10.1-rc.0'],
|
||||
['14.15.5-rc.1', '14.15.5-rc.1'],
|
||||
['16.17.0-rc.1', '16.17.0-rc.1'],
|
||||
['17.0.0-rc.1', '17.0.0-rc.1']
|
||||
])(
|
||||
'finds the %s version in the hostedToolcache',
|
||||
async (input, expectedVersion) => {
|
||||
const toolPath = path.normalize(`/cache/node/${expectedVersion}/x64`);
|
||||
findSpy.mockImplementation((_, version) =>
|
||||
path.normalize(`/cache/node/${version}/x64`)
|
||||
);
|
||||
findAllVersionsSpy.mockReturnValue([
|
||||
'2.2.2-rc.2',
|
||||
'1.1.1-rc.1',
|
||||
'99.1.1',
|
||||
expectedVersion,
|
||||
'88.1.1',
|
||||
'3.3.3-rc.3'
|
||||
]);
|
||||
|
||||
inputs['node-version'] = input;
|
||||
os['arch'] = 'x64';
|
||||
os['platform'] = 'linux';
|
||||
|
||||
// act
|
||||
await main.run();
|
||||
|
||||
// assert
|
||||
expect(logSpy).toHaveBeenCalledWith(`Found in cache @ ${toolPath}`);
|
||||
expect(cnSpy).toHaveBeenCalledWith(
|
||||
`::add-path::${path.join(toolPath, 'bin')}${osm.EOL}`
|
||||
);
|
||||
}
|
||||
);
|
||||
|
||||
it('throws an error if version is not found', async () => {
|
||||
const versionSpec = '19.0.0-rc.3';
|
||||
|
||||
findSpy.mockImplementation(() => '');
|
||||
findAllVersionsSpy.mockImplementation(() => []);
|
||||
dlSpy.mockImplementation(async () => '/some/temp/path');
|
||||
exSpy.mockImplementation(async () => '/some/other/temp/path');
|
||||
|
||||
inputs['node-version'] = versionSpec;
|
||||
os['arch'] = 'x64';
|
||||
os['platform'] = 'linux';
|
||||
// act
|
||||
await main.run();
|
||||
|
||||
// assert
|
||||
expect(cnSpy).toHaveBeenCalledWith(
|
||||
`::error::Unable to find Node version '${versionSpec}' for platform ${os.platform} and architecture ${os.arch}.${osm.EOL}`
|
||||
);
|
||||
});
|
||||
});
|
||||
});
|
@ -19,13 +19,13 @@ inputs:
|
||||
scope:
|
||||
description: 'Optional scope for authenticating against scoped registries. Will fall back to the repository owner when using the GitHub Packages registry (https://npm.pkg.github.com/).'
|
||||
token:
|
||||
description: Used to pull node distributions from node-versions. Since there's a default, this is typically not supplied by the user.
|
||||
default: ${{ github.token }}
|
||||
description: Used to pull node distributions from node-versions. Since there's a default, this is typically not supplied by the user. When running this action on github.com, the default value is sufficient. When running on GHES, you can pass a personal access token for github.com if you are experiencing rate limiting.
|
||||
default: ${{ github.server_url == 'https://github.com' && github.token || '' }}
|
||||
cache:
|
||||
description: 'Used to specify a package manager for caching in the default directory. Supported values: npm, yarn, pnpm.'
|
||||
cache-dependency-path:
|
||||
description: 'Used to specify the path to a dependency file: package-lock.json, yarn.lock, etc. Supports wildcards or a list of file names for caching multiple dependencies.'
|
||||
# TODO: add input to control forcing to pull from cloud or dist.
|
||||
# TODO: add input to control forcing to pull from cloud or dist.
|
||||
# escape valve for someone having issues or needing the absolute latest which isn't cached yet
|
||||
outputs:
|
||||
cache-hit:
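
The `token` default shown in this hunk is a workflow expression: on github.com it resolves to the built-in `github.token`, while on GHES it resolves to an empty string, so a github.com personal access token has to be supplied explicitly if you hit rate limits pulling from `node-versions`. A minimal sketch of doing that, assuming a secret named `GH_DOTCOM_TOKEN` holds the PAT (the secret name is illustrative):

```yaml
steps:
  - uses: actions/checkout@v3
  - uses: actions/setup-node@v3
    with:
      node-version: '16'
      # Assumed secret containing a github.com PAT; only needed on GHES,
      # where the default token expression above resolves to ''.
      token: ${{ secrets.GH_DOTCOM_TOKEN }}
  - run: npm ci
```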
|
||||
|
6958
dist/cache-save/index.js
vendored
File diff suppressed because one or more lines are too long
6882
dist/setup/index.js
vendored
File diff suppressed because one or more lines are too long
@ -24,7 +24,7 @@ To ensure that `yarn.lock` is always committed, use `yarn install --immutable` w
|
||||
**See also:**
|
||||
- [Documentation of `yarn.lock`](https://classic.yarnpkg.com/en/docs/yarn-lock)
|
||||
- [Documentation of `--frozen-lockfile` option](https://classic.yarnpkg.com/en/docs/cli/install#toc-yarn-install-frozen-lockfile)
|
||||
- [QA - Should lockfiles be committed to the repoistory?](https://yarnpkg.com/getting-started/qa/#should-lockfiles-be-committed-to-the-repository)
|
||||
- [QA - Should lockfiles be committed to the repository?](https://yarnpkg.com/getting-started/qa/#should-lockfiles-be-committed-to-the-repository)
|
||||
- [Documentation of `yarn install`](https://yarnpkg.com/cli/install)
|
||||
|
||||
### PNPM
|
||||
@ -104,6 +104,129 @@ jobs:
|
||||
- run: npm test
|
||||
```
|
||||
|
||||
## V8 Canary versions
|
||||
|
||||
You can specify a V8 canary version to download it from https://nodejs.org/download/v8-canary.
|
||||
|
||||
### Install v8 canary build for specific node version
|
||||
|
||||
```yaml
|
||||
jobs:
|
||||
build:
|
||||
runs-on: ubuntu-latest
|
||||
name: Node sample
|
||||
steps:
|
||||
- uses: actions/checkout@v3
|
||||
- uses: actions/setup-node@v3
|
||||
with:
|
||||
node-version: '20.0.0-v8-canary' # it will install the latest v8 canary release for node 20.0.0
|
||||
- run: npm ci
|
||||
- run: npm test
|
||||
```
|
||||
### Install v8 canary build for major node version
|
||||
|
||||
```yaml
|
||||
jobs:
|
||||
build:
|
||||
runs-on: ubuntu-latest
|
||||
name: Node sample
|
||||
steps:
|
||||
- uses: actions/checkout@v3
|
||||
- uses: actions/setup-node@v3
|
||||
with:
|
||||
node-version: '20-v8-canary' # it will install the latest v8 canary release for node 20
|
||||
- run: npm ci
|
||||
- run: npm test
|
||||
```
|
||||
|
||||
### Install the exact v8 canary version
|
||||
|
||||
```yaml
|
||||
jobs:
|
||||
build:
|
||||
runs-on: ubuntu-latest
|
||||
name: Node sample
|
||||
steps:
|
||||
- uses: actions/checkout@v3
|
||||
- uses: actions/setup-node@v3
|
||||
with:
|
||||
node-version: 'v20.1.1-v8-canary20221103f7e2421e91'
|
||||
- run: npm ci
|
||||
- run: npm test
|
||||
```
|
||||
|
||||
## Nightly versions
|
||||
|
||||
You can specify a nightly version to download it from https://nodejs.org/download/nightly.
|
||||
|
||||
### Install the nightly build for a major version
|
||||
|
||||
```yaml
|
||||
jobs:
|
||||
build:
|
||||
runs-on: ubuntu-latest
|
||||
name: Node sample
|
||||
steps:
|
||||
- uses: actions/checkout@v3
|
||||
- uses: actions/setup-node@v3
|
||||
with:
|
||||
node-version: '16-nightly' # it will install the latest nightly release for node 16
|
||||
- run: npm ci
|
||||
- run: npm test
|
||||
```
|
||||
|
||||
### Install the nightly build for a specific version
|
||||
|
||||
```yaml
|
||||
jobs:
|
||||
build:
|
||||
runs-on: ubuntu-latest
|
||||
name: Node sample
|
||||
steps:
|
||||
- uses: actions/checkout@v3
|
||||
- uses: actions/setup-node@v3
|
||||
with:
|
||||
node-version: '16.0.0-nightly' # it will install the latest nightly release for node 16.0.0
|
||||
- run: npm ci
|
||||
- run: npm test
|
||||
```
|
||||
|
||||
### Install an exact nightly version
|
||||
|
||||
```yaml
|
||||
jobs:
|
||||
build:
|
||||
runs-on: ubuntu-latest
|
||||
name: Node sample
|
||||
steps:
|
||||
- uses: actions/checkout@v3
|
||||
- uses: actions/setup-node@v3
|
||||
with:
|
||||
node-version: '16.0.0-nightly20210420a0261d231c'
|
||||
- run: npm ci
|
||||
- run: npm test
|
||||
```
|
||||
|
||||
## RC versions
|
||||
|
||||
You can specify an RC version to download it from https://nodejs.org/download/rc.
|
||||
|
||||
```yaml
|
||||
jobs:
|
||||
build:
|
||||
runs-on: ubuntu-latest
|
||||
name: Node sample
|
||||
steps:
|
||||
- uses: actions/checkout@v3
|
||||
- uses: actions/setup-node@v3
|
||||
with:
|
||||
node-version: '16.0.0-rc.1'
|
||||
- run: npm ci
|
||||
- run: npm test
|
||||
```
|
||||
|
||||
**Note:** Unlike nightly versions, which support version range specifiers, you must specify the exact version for a release candidate: `16.0.0-rc.1`.
|
||||
|
||||
## Caching packages data
|
||||
The action follows [actions/cache](https://github.com/actions/cache/blob/main/examples.md#node---npm) guidelines, and caches global cache on the machine instead of `node_modules`, so cache can be reused between different Node.js versions.
|
||||
|
||||
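
For reference, a minimal caching setup with npm might look like the sketch below. This block is illustrative and not part of the diff; `cache` and `cache-dependency-path` are the action's documented inputs, while the Node version and lockfile path are just example values:

```yaml
steps:
- uses: actions/checkout@v3
- uses: actions/setup-node@v3
  with:
    node-version: '16'
    cache: 'npm'
    cache-dependency-path: package-lock.json  # example path; defaults to the lockfile in the repository root
- run: npm ci
- run: npm test
```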
@ -138,10 +261,14 @@ steps:
  with:
    node-version: '14'
    cache: 'pnpm'
- run: pnpm install --frozen-lockfile
- run: pnpm install
- run: pnpm test
```

> **Note:** By default, the `--frozen-lockfile` option is passed starting from pnpm `6.10.x`; pnpm adds it automatically when running on [CI](https://pnpm.io/cli/install#--frozen-lockfile).
> If the `pnpm-lock.yaml` file changes, pass the `--frozen-lockfile` option explicitly.

**Using wildcard patterns to cache dependencies**
```yaml
steps:
@ -278,11 +405,14 @@ steps:
    yarn config set npmScopes.my-org.npmAlwaysAuth true
    yarn config set npmScopes.my-org.npmAuthToken $NPM_AUTH_TOKEN
  env:
    NPM_AUTH_TOKEN: ${{ secrets.YARN_TOKEN }}
    NPM_AUTH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
- name: Install dependencies
  run: yarn install --immutable
```
NOTE: As described in https://github.com/actions/setup-node/issues/49, you cannot use `secrets.GITHUB_TOKEN` to access private GitHub Packages within the same organization but in a different repository.

To access private GitHub Packages within the same organization, go to "Manage Actions access" in the package settings and select the repositories you want to grant access to.

Please refer to [Ensuring workflow access to your package - Configuring a package's access control and visibility](https://docs.github.com/en/packages/learn-github-packages/configuring-a-packages-access-control-and-visibility#ensuring-workflow-access-to-your-package) for more details.
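
As a rough sketch of that setup (illustrative only, not part of the diff above; `@my-org` and the `GH_PACKAGES_PAT` secret holding a personal access token with package read access are placeholder names):

```yaml
steps:
- uses: actions/checkout@v3
- uses: actions/setup-node@v3
  with:
    node-version: '16'
    registry-url: 'https://npm.pkg.github.com'
    scope: '@my-org'            # placeholder organization scope
- run: npm ci
  env:
    NODE_AUTH_TOKEN: ${{ secrets.GH_PACKAGES_PAT }}  # placeholder secret holding a PAT
```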
### always-auth input
The `always-auth` input sets `always-auth=true` in the `.npmrc` file. With this option set, [npm](https://docs.npmjs.com/cli/v6/using-npm/config#always-auth)/yarn sends the authentication credentials with every request made to the registries.
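
A minimal sketch of how this is typically wired up (illustrative only; `NPM_TOKEN` is a placeholder secret name):

```yaml
steps:
- uses: actions/checkout@v3
- uses: actions/setup-node@v3
  with:
    node-version: '16'
    registry-url: 'https://registry.npmjs.org'
    always-auth: true
- run: npm ci
  env:
    NODE_AUTH_TOKEN: ${{ secrets.NPM_TOKEN }}  # placeholder secret name
```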
@ -52,7 +52,7 @@ Pull requests are the easiest way to contribute changes to git repos at GitHub.

- Please check that no one else has already created a pull request with these or similar changes
- Use a "feature branch" for your changes. That separates the changes in the pull request from your other changes and makes it easy to edit/amend commits in the pull request
- **Run `pre-checkin` script to format, build and test changes**
- **Run `pre-checkin` script to format, lint, build and test changes**
- Make sure your changes are well formatted and that all tests are passing
- If your pull request is connected to an open issue, please leave a link to this issue in the `Related issue:` section
- If you later need to add new commits to the pull request, you can simply commit the changes to the local branch and then push them. The pull request gets updated automatically
@ -61,6 +61,7 @@ Pull requests are the easiest way to contribute changes to git repos at GitHub.

- To implement new features or fix bugs, you need to make changes to the `.ts` files, which are located in the `src` folder
- To comply with the code style, **you need to run the `format` script**
- To lint the code, **you need to run the `lint:fix` script**
- To transpile the source code to `javascript` we use [NCC](https://github.com/vercel/ncc). **It is very important to run the `build` script after making changes**, otherwise your changes will not get into the final `javascript` build
- You can also run formatting, building, and testing with a single `pre-checkin` command
2825 package-lock.json generated
File diff suppressed because it is too large
23 package.json
@ -6,10 +6,12 @@
  "main": "lib/setup-node.js",
  "scripts": {
    "build": "ncc build -o dist/setup src/setup-node.ts && ncc build -o dist/cache-save src/cache-save.ts",
    "format": "prettier --write **/*.ts",
    "format-check": "prettier --check **/*.ts",
    "format": "prettier --no-error-on-unmatched-pattern --config ./.prettierrc.js --write \"**/*.{ts,yml,yaml}\"",
    "format-check": "prettier --no-error-on-unmatched-pattern --config ./.prettierrc.js --check \"**/*.{ts,yml,yaml}\"",
    "lint": "eslint --config ./.eslintrc.js \"**/*.ts\"",
    "lint:fix": "eslint --config ./.eslintrc.js \"**/*.ts\" --fix",
    "test": "jest --coverage",
    "pre-checkin": "npm run format && npm run build && npm test"
    "pre-checkin": "npm run format && npm run lint:fix && npm run build && npm test"
  },
  "repository": {
    "type": "git",
@ -31,17 +33,26 @@
    "@actions/http-client": "^1.0.11",
    "@actions/io": "^1.0.2",
    "@actions/tool-cache": "^1.5.4",
    "semver": "^6.1.1"
    "semver": "^6.3.1",
    "uuid": "^9.0.0"
  },
  "devDependencies": {
    "@types/jest": "^27.0.2",
    "@types/node": "^16.11.25",
    "@types/semver": "^6.0.0",
    "@types/uuid": "^9.0.3",
    "@typescript-eslint/eslint-plugin": "^5.54.0",
    "@typescript-eslint/parser": "^5.54.0",
    "@vercel/ncc": "^0.33.4",
    "eslint": "^8.35.0",
    "eslint-config-prettier": "^8.6.0",
    "eslint-plugin-jest": "^27.2.1",
    "eslint-plugin-node": "^11.1.0",
    "jest": "^27.2.5",
    "jest-circus": "^27.2.5",
    "prettier": "^1.19.1",
    "jest-each": "^27.2.5",
    "prettier": "^2.8.4",
    "ts-jest": "^27.0.5",
    "typescript": "^3.8.3"
    "typescript": "^4.2.3"
  }
}
@ -29,16 +29,16 @@ function writeRegistryToFile(
    scope = '@' + scope;
  }
  if (scope) {
    scope = scope.toLowerCase();
    scope = scope.toLowerCase() + ':';
  }

  core.debug(`Setting auth in ${fileLocation}`);
  let newContents: string = '';
  let newContents = '';
  if (fs.existsSync(fileLocation)) {
    const curContents: string = fs.readFileSync(fileLocation, 'utf8');
    curContents.split(os.EOL).forEach((line: string) => {
      // Add current contents unless they are setting the registry
      if (!line.toLowerCase().startsWith('registry')) {
      if (!line.toLowerCase().startsWith(`${scope}registry`)) {
        newContents += line + os.EOL;
      }
    });
@ -46,10 +46,8 @@ function writeRegistryToFile(
  // Remove http: or https: from front of registry.
  const authString: string =
    registryUrl.replace(/(^\w+:|^)/, '') + ':_authToken=${NODE_AUTH_TOKEN}';
  const registryString: string = scope
    ? `${scope}:registry=${registryUrl}`
    : `registry=${registryUrl}`;
  const alwaysAuthString: string = `always-auth=${alwaysAuth}`;
  const registryString = `${scope}registry=${registryUrl}`;
  const alwaysAuthString = `always-auth=${alwaysAuth}`;
  newContents += `${authString}${os.EOL}${registryString}${os.EOL}${alwaysAuthString}`;
  fs.writeFileSync(fileLocation, newContents);
  core.exportVariable('NPM_CONFIG_USERCONFIG', fileLocation);
|
@ -4,16 +4,17 @@ import * as glob from '@actions/glob';
|
||||
import path from 'path';
|
||||
import fs from 'fs';
|
||||
|
||||
import {State, Outputs} from './constants';
|
||||
import {State} from './constants';
|
||||
import {
|
||||
getCacheDirectoryPath,
|
||||
getCacheDirectories,
|
||||
getPackageManagerInfo,
|
||||
repoHasYarnBerryManagedDependencies,
|
||||
PackageManagerInfo
|
||||
} from './cache-utils';
|
||||
|
||||
export const restoreCache = async (
|
||||
packageManager: string,
|
||||
cacheDependencyPath?: string
|
||||
cacheDependencyPath: string
|
||||
) => {
|
||||
const packageManagerInfo = await getPackageManagerInfo(packageManager);
|
||||
if (!packageManagerInfo) {
|
||||
@ -21,10 +22,11 @@ export const restoreCache = async (
|
||||
}
|
||||
const platform = process.env.RUNNER_OS;
|
||||
|
||||
const cachePath = await getCacheDirectoryPath(
|
||||
const cachePaths = await getCacheDirectories(
|
||||
packageManagerInfo,
|
||||
packageManager
|
||||
cacheDependencyPath
|
||||
);
|
||||
core.saveState(State.CachePaths, cachePaths);
|
||||
const lockFilePath = cacheDependencyPath
|
||||
? cacheDependencyPath
|
||||
: findLockFile(packageManagerInfo);
|
||||
@ -36,12 +38,26 @@ export const restoreCache = async (
|
||||
);
|
||||
}
|
||||
|
||||
const primaryKey = `node-cache-${platform}-${packageManager}-${fileHash}`;
|
||||
const keyPrefix = `node-cache-${platform}-${packageManager}`;
|
||||
const primaryKey = `${keyPrefix}-${fileHash}`;
|
||||
core.debug(`primary key is ${primaryKey}`);
|
||||
|
||||
core.saveState(State.CachePrimaryKey, primaryKey);
|
||||
|
||||
const cacheKey = await cache.restoreCache([cachePath], primaryKey);
|
||||
const isManagedByYarnBerry = await repoHasYarnBerryManagedDependencies(
|
||||
packageManagerInfo,
|
||||
cacheDependencyPath
|
||||
);
|
||||
let cacheKey: string | undefined;
|
||||
if (isManagedByYarnBerry) {
|
||||
core.info(
|
||||
'All dependencies are managed locally by yarn3, the previous cache can be used'
|
||||
);
|
||||
cacheKey = await cache.restoreCache(cachePaths, primaryKey, [keyPrefix]);
|
||||
} else {
|
||||
cacheKey = await cache.restoreCache(cachePaths, primaryKey);
|
||||
}
|
||||
|
||||
core.setOutput('cache-hit', Boolean(cacheKey));
|
||||
|
||||
if (!cacheKey) {
|
||||
@ -54,8 +70,9 @@ export const restoreCache = async (
|
||||
};
|
||||
|
||||
const findLockFile = (packageManager: PackageManagerInfo) => {
|
||||
let lockFiles = packageManager.lockFilePatterns;
|
||||
const lockFiles = packageManager.lockFilePatterns;
|
||||
const workspace = process.env.GITHUB_WORKSPACE!;
|
||||
|
||||
const rootContent = fs.readdirSync(workspace);
|
||||
|
||||
const lockFile = lockFiles.find(item => rootContent.includes(item));
|
||||
|
@ -1,8 +1,10 @@
|
||||
import * as core from '@actions/core';
|
||||
import * as cache from '@actions/cache';
|
||||
|
||||
import fs from 'fs';
|
||||
|
||||
import {State} from './constants';
|
||||
import {getCacheDirectoryPath, getPackageManagerInfo} from './cache-utils';
|
||||
import {getPackageManagerInfo} from './cache-utils';
|
||||
|
||||
// Catch and log any unhandled exceptions. These exceptions can leak out of the uploadChunk method in
|
||||
// @actions/toolkit when a failed upload closes the file descriptor causing any in-process reads to
|
||||
@ -24,6 +26,10 @@ export async function run() {
|
||||
const cachePackages = async (packageManager: string) => {
|
||||
const state = core.getState(State.CacheMatchedKey);
|
||||
const primaryKey = core.getState(State.CachePrimaryKey);
|
||||
let cachePaths = JSON.parse(
|
||||
core.getState(State.CachePaths) || '[]'
|
||||
) as string[];
|
||||
cachePaths = cachePaths.filter(fs.existsSync);
|
||||
|
||||
const packageManagerInfo = await getPackageManagerInfo(packageManager);
|
||||
if (!packageManagerInfo) {
|
||||
@ -31,14 +37,12 @@ const cachePackages = async (packageManager: string) => {
|
||||
return;
|
||||
}
|
||||
|
||||
const cachePath = await getCacheDirectoryPath(
|
||||
packageManagerInfo,
|
||||
packageManager
|
||||
);
|
||||
|
||||
if (!fs.existsSync(cachePath)) {
|
||||
if (!cachePaths.length) {
|
||||
// TODO: core.getInput has a bug - it can return undefined despite its definition (tests only?)
|
||||
// export declare function getInput(name: string, options?: InputOptions): string;
|
||||
const cacheDependencyPath = core.getInput('cache-dependency-path') || '';
|
||||
throw new Error(
|
||||
`Cache folder path is retrieved for ${packageManager} but doesn't exist on disk: ${cachePath}`
|
||||
`Cache folder paths are not retrieved for ${packageManager} with cache-dependency-path = ${cacheDependencyPath}`
|
||||
);
|
||||
}
|
||||
|
||||
@ -49,7 +53,7 @@ const cachePackages = async (packageManager: string) => {
|
||||
return;
|
||||
}
|
||||
|
||||
const cacheId = await cache.saveCache([cachePath], primaryKey);
|
||||
const cacheId = await cache.saveCache(cachePaths, primaryKey);
|
||||
if (cacheId == -1) {
|
||||
return;
|
||||
}
|
||||
|
@ -1,40 +1,79 @@
|
||||
import * as core from '@actions/core';
|
||||
import * as exec from '@actions/exec';
|
||||
import * as cache from '@actions/cache';
|
||||
|
||||
type SupportedPackageManagers = {
|
||||
[prop: string]: PackageManagerInfo;
|
||||
};
|
||||
import * as glob from '@actions/glob';
|
||||
import path from 'path';
|
||||
import fs from 'fs';
|
||||
import {unique} from './util';
|
||||
|
||||
export interface PackageManagerInfo {
|
||||
name: string;
|
||||
lockFilePatterns: Array<string>;
|
||||
getCacheFolderCommand: string;
|
||||
getCacheFolderPath: (projectDir?: string) => Promise<string>;
|
||||
}
|
||||
|
||||
interface SupportedPackageManagers {
|
||||
npm: PackageManagerInfo;
|
||||
pnpm: PackageManagerInfo;
|
||||
yarn: PackageManagerInfo;
|
||||
}
|
||||
export const supportedPackageManagers: SupportedPackageManagers = {
|
||||
npm: {
|
||||
name: 'npm',
|
||||
lockFilePatterns: ['package-lock.json', 'npm-shrinkwrap.json', 'yarn.lock'],
|
||||
getCacheFolderCommand: 'npm config get cache'
|
||||
getCacheFolderPath: () =>
|
||||
getCommandOutputNotEmpty(
|
||||
'npm config get cache',
|
||||
'Could not get npm cache folder path'
|
||||
)
|
||||
},
|
||||
pnpm: {
|
||||
name: 'pnpm',
|
||||
lockFilePatterns: ['pnpm-lock.yaml'],
|
||||
getCacheFolderCommand: 'pnpm store path --silent'
|
||||
getCacheFolderPath: () =>
|
||||
getCommandOutputNotEmpty(
|
||||
'pnpm store path --silent',
|
||||
'Could not get pnpm cache folder path'
|
||||
)
|
||||
},
|
||||
yarn1: {
|
||||
yarn: {
|
||||
name: 'yarn',
|
||||
lockFilePatterns: ['yarn.lock'],
|
||||
getCacheFolderCommand: 'yarn cache dir'
|
||||
},
|
||||
yarn2: {
|
||||
lockFilePatterns: ['yarn.lock'],
|
||||
getCacheFolderCommand: 'yarn config get cacheFolder'
|
||||
getCacheFolderPath: async projectDir => {
|
||||
const yarnVersion = await getCommandOutputNotEmpty(
|
||||
`yarn --version`,
|
||||
'Could not retrieve version of yarn',
|
||||
projectDir
|
||||
);
|
||||
|
||||
core.debug(
|
||||
`Consumed yarn version is ${yarnVersion} (working dir: "${
|
||||
projectDir || ''
|
||||
}")`
|
||||
);
|
||||
|
||||
const stdOut = yarnVersion.startsWith('1.')
|
||||
? await getCommandOutput('yarn cache dir', projectDir)
|
||||
: await getCommandOutput('yarn config get cacheFolder', projectDir);
|
||||
|
||||
if (!stdOut) {
|
||||
throw new Error(
|
||||
`Could not get yarn cache folder path for ${projectDir}`
|
||||
);
|
||||
}
|
||||
return stdOut;
|
||||
}
|
||||
}
|
||||
};
|
||||
|
||||
export const getCommandOutput = async (toolCommand: string) => {
|
||||
export const getCommandOutput = async (
|
||||
toolCommand: string,
|
||||
cwd?: string
|
||||
): Promise<string> => {
|
||||
let {stdout, stderr, exitCode} = await exec.getExecOutput(
|
||||
toolCommand,
|
||||
undefined,
|
||||
{ignoreReturnCode: true}
|
||||
{ignoreReturnCode: true, ...(cwd && {cwd})}
|
||||
);
|
||||
|
||||
if (exitCode) {
|
||||
@ -47,16 +86,15 @@ export const getCommandOutput = async (toolCommand: string) => {
|
||||
return stdout.trim();
|
||||
};
|
||||
|
||||
const getPackageManagerVersion = async (
|
||||
packageManager: string,
|
||||
command: string
|
||||
) => {
|
||||
const stdOut = await getCommandOutput(`${packageManager} ${command}`);
|
||||
|
||||
export const getCommandOutputNotEmpty = async (
|
||||
toolCommand: string,
|
||||
error: string,
|
||||
cwd?: string
|
||||
): Promise<string> => {
|
||||
const stdOut = getCommandOutput(toolCommand, cwd);
|
||||
if (!stdOut) {
|
||||
throw new Error(`Could not retrieve version of ${packageManager}`);
|
||||
throw new Error(error);
|
||||
}
|
||||
|
||||
return stdOut;
|
||||
};
|
||||
|
||||
@ -66,35 +104,191 @@ export const getPackageManagerInfo = async (packageManager: string) => {
|
||||
} else if (packageManager === 'pnpm') {
|
||||
return supportedPackageManagers.pnpm;
|
||||
} else if (packageManager === 'yarn') {
|
||||
const yarnVersion = await getPackageManagerVersion('yarn', '--version');
|
||||
|
||||
core.debug(`Consumed yarn version is ${yarnVersion}`);
|
||||
|
||||
if (yarnVersion.startsWith('1.')) {
|
||||
return supportedPackageManagers.yarn1;
|
||||
} else {
|
||||
return supportedPackageManagers.yarn2;
|
||||
}
|
||||
return supportedPackageManagers.yarn;
|
||||
} else {
|
||||
return null;
|
||||
}
|
||||
};
|
||||
|
||||
export const getCacheDirectoryPath = async (
|
||||
packageManagerInfo: PackageManagerInfo,
|
||||
packageManager: string
|
||||
) => {
|
||||
const stdOut = await getCommandOutput(
|
||||
packageManagerInfo.getCacheFolderCommand
|
||||
);
|
||||
/**
|
||||
* getProjectDirectoriesFromCacheDependencyPath is called twice during `restoreCache`
|
||||
* - first through `getCacheDirectories`
|
||||
* - second from `repoHasYarn3ManagedCache`
|
||||
*
|
||||
* it contains expensive IO operation and thus should be memoized
|
||||
*/
|
||||
|
||||
if (!stdOut) {
|
||||
throw new Error(`Could not get cache folder path for ${packageManager}`);
|
||||
let projectDirectoriesMemoized: string[] | null = null;
|
||||
/**
|
||||
* unit test must reset memoized variables
|
||||
*/
|
||||
export const resetProjectDirectoriesMemoized = () =>
|
||||
(projectDirectoriesMemoized = null);
|
||||
/**
|
||||
* Expands (converts) the string input `cache-dependency-path` to list of directories that
|
||||
* may be project roots
|
||||
* @param cacheDependencyPath - either a single string or multiline string with possible glob patterns
|
||||
* expected to be the result of `core.getInput('cache-dependency-path')`
|
||||
* @return list of directories and possible
|
||||
*/
|
||||
const getProjectDirectoriesFromCacheDependencyPath = async (
|
||||
cacheDependencyPath: string
|
||||
): Promise<string[]> => {
|
||||
if (projectDirectoriesMemoized !== null) {
|
||||
return projectDirectoriesMemoized;
|
||||
}
|
||||
|
||||
core.debug(`${packageManager} path is ${stdOut}`);
|
||||
const globber = await glob.create(cacheDependencyPath);
|
||||
const cacheDependenciesPaths = await globber.glob();
|
||||
|
||||
return stdOut.trim();
|
||||
const existingDirectories: string[] = cacheDependenciesPaths
|
||||
.map(path.dirname)
|
||||
.filter(unique())
|
||||
.map(dirName => fs.realpathSync(dirName))
|
||||
.filter(directory => fs.lstatSync(directory).isDirectory());
|
||||
|
||||
if (!existingDirectories.length)
|
||||
core.warning(
|
||||
`No existing directories found containing cache-dependency-path="${cacheDependencyPath}"`
|
||||
);
|
||||
|
||||
projectDirectoriesMemoized = existingDirectories;
|
||||
return existingDirectories;
|
||||
};
|
||||
|
||||
/**
|
||||
* Finds the cache directories configured for the repo if cache-dependency-path is not empty
|
||||
* @param packageManagerInfo - an object having getCacheFolderPath method specific to given PM
|
||||
* @param cacheDependencyPath - either a single string or multiline string with possible glob patterns
|
||||
* expected to be the result of `core.getInput('cache-dependency-path')`
|
||||
* @return list of files on which the cache depends
|
||||
*/
|
||||
const getCacheDirectoriesFromCacheDependencyPath = async (
|
||||
packageManagerInfo: PackageManagerInfo,
|
||||
cacheDependencyPath: string
|
||||
): Promise<string[]> => {
|
||||
const projectDirectories = await getProjectDirectoriesFromCacheDependencyPath(
|
||||
cacheDependencyPath
|
||||
);
|
||||
const cacheFoldersPaths = await Promise.all(
|
||||
projectDirectories.map(async projectDirectory => {
|
||||
const cacheFolderPath = await packageManagerInfo.getCacheFolderPath(
|
||||
projectDirectory
|
||||
);
|
||||
core.debug(
|
||||
`${packageManagerInfo.name}'s cache folder "${cacheFolderPath}" configured for the directory "${projectDirectory}"`
|
||||
);
|
||||
return cacheFolderPath;
|
||||
})
|
||||
);
|
||||
// uniq in order to do not cache the same directories twice
|
||||
return cacheFoldersPaths.filter(unique());
|
||||
};
|
||||
|
||||
/**
|
||||
* Finds the cache directories configured for the repo ignoring cache-dependency-path
|
||||
* @param packageManagerInfo - an object having getCacheFolderPath method specific to given PM
|
||||
* @return list of files on which the cache depends
|
||||
*/
|
||||
const getCacheDirectoriesForRootProject = async (
|
||||
packageManagerInfo: PackageManagerInfo
|
||||
): Promise<string[]> => {
|
||||
const cacheFolderPath = await packageManagerInfo.getCacheFolderPath();
|
||||
core.debug(
|
||||
`${packageManagerInfo.name}'s cache folder "${cacheFolderPath}" configured for the root directory`
|
||||
);
|
||||
return [cacheFolderPath];
|
||||
};
|
||||
|
||||
/**
|
||||
* A function to find the cache directories configured for the repo
|
||||
* currently it handles only the case of PM=yarn && cacheDependencyPath is not empty
|
||||
* @param packageManagerInfo - an object having getCacheFolderPath method specific to given PM
|
||||
* @param cacheDependencyPath - either a single string or multiline string with possible glob patterns
|
||||
* expected to be the result of `core.getInput('cache-dependency-path')`
|
||||
* @return list of files on which the cache depends
|
||||
*/
|
||||
export const getCacheDirectories = async (
|
||||
packageManagerInfo: PackageManagerInfo,
|
||||
cacheDependencyPath: string
|
||||
): Promise<string[]> => {
|
||||
// For yarn, if cacheDependencyPath is set, ask information about cache folders in each project
|
||||
// folder satisfied by cacheDependencyPath https://github.com/actions/setup-node/issues/488
|
||||
if (packageManagerInfo.name === 'yarn' && cacheDependencyPath) {
|
||||
return getCacheDirectoriesFromCacheDependencyPath(
|
||||
packageManagerInfo,
|
||||
cacheDependencyPath
|
||||
);
|
||||
}
|
||||
return getCacheDirectoriesForRootProject(packageManagerInfo);
|
||||
};
|
||||
|
||||
/**
|
||||
* A function to check if the directory is a yarn project configured to manage
|
||||
* obsolete dependencies in the local cache
|
||||
* @param directory - a path to the folder
|
||||
* @return - true if the directory's project is yarn managed
|
||||
* - if there's .yarn/cache folder do not mess with the dependencies kept in the repo, return false
|
||||
* - global cache is not managed by yarn @see https://yarnpkg.com/features/offline-cache, return false
|
||||
* - if local cache is not explicitly enabled (not yarn3), return false
|
||||
* - return true otherwise
|
||||
*/
|
||||
const projectHasYarnBerryManagedDependencies = async (
|
||||
directory: string
|
||||
): Promise<boolean> => {
|
||||
const workDir = directory || process.env.GITHUB_WORKSPACE || '.';
|
||||
core.debug(`check if "${workDir}" has locally managed yarn3 dependencies`);
|
||||
|
||||
// if .yarn/cache directory exists the cache is managed by version control system
|
||||
const yarnCacheFile = path.join(workDir, '.yarn', 'cache');
|
||||
if (
|
||||
fs.existsSync(yarnCacheFile) &&
|
||||
fs.lstatSync(yarnCacheFile).isDirectory()
|
||||
) {
|
||||
core.debug(
|
||||
`"${workDir}" has .yarn/cache - dependencies are kept in the repository`
|
||||
);
|
||||
return Promise.resolve(false);
|
||||
}
|
||||
|
||||
// NOTE: yarn1 returns 'undefined' with return code = 0
|
||||
const enableGlobalCache = await getCommandOutput(
|
||||
'yarn config get enableGlobalCache',
|
||||
workDir
|
||||
);
|
||||
// only local cache is not managed by yarn
|
||||
const managed = enableGlobalCache.includes('false');
|
||||
if (managed) {
|
||||
core.debug(`"${workDir}" dependencies are managed by yarn 3 locally`);
|
||||
return true;
|
||||
} else {
|
||||
core.debug(`"${workDir}" dependencies are not managed by yarn 3 locally`);
|
||||
return false;
|
||||
}
|
||||
};
|
||||
|
||||
/**
|
||||
* A function to report the repo contains Yarn managed projects
|
||||
* @param packageManagerInfo - used to make sure current package manager is yarn
|
||||
* @param cacheDependencyPath - either a single string or multiline string with possible glob patterns
|
||||
* expected to be the result of `core.getInput('cache-dependency-path')`
|
||||
* @return - true if all project directories configured to be Yarn managed
|
||||
*/
|
||||
export const repoHasYarnBerryManagedDependencies = async (
|
||||
packageManagerInfo: PackageManagerInfo,
|
||||
cacheDependencyPath: string
|
||||
): Promise<boolean> => {
|
||||
if (packageManagerInfo.name !== 'yarn') return false;
|
||||
|
||||
const yarnDirs = cacheDependencyPath
|
||||
? await getProjectDirectoriesFromCacheDependencyPath(cacheDependencyPath)
|
||||
: [''];
|
||||
|
||||
const isManagedList = await Promise.all(
|
||||
yarnDirs.map(projectHasYarnBerryManagedDependencies)
|
||||
);
|
||||
|
||||
return isManagedList.every(Boolean);
|
||||
};
|
||||
|
||||
export function isGhes(): boolean {
|
||||
@ -105,19 +299,18 @@ export function isGhes(): boolean {
|
||||
}
|
||||
|
||||
export function isCacheFeatureAvailable(): boolean {
|
||||
if (!cache.isFeatureAvailable()) {
|
||||
if (isGhes()) {
|
||||
throw new Error(
|
||||
'Cache action is only supported on GHES version >= 3.5. If you are on version >=3.5 Please check with GHES admin if Actions cache service is enabled or not.'
|
||||
);
|
||||
} else {
|
||||
core.warning(
|
||||
'The runner was not able to contact the cache service. Caching will be skipped'
|
||||
);
|
||||
}
|
||||
if (cache.isFeatureAvailable()) return true;
|
||||
|
||||
if (isGhes()) {
|
||||
core.warning(
|
||||
'Cache action is only supported on GHES version >= 3.5. If you are on version >=3.5 Please check with GHES admin if Actions cache service is enabled or not.'
|
||||
);
|
||||
return false;
|
||||
}
|
||||
|
||||
return true;
|
||||
core.warning(
|
||||
'The runner was not able to contact the cache service. Caching will be skipped'
|
||||
);
|
||||
|
||||
return false;
|
||||
}
|
||||
@ -6,7 +6,8 @@ export enum LockType {

export enum State {
  CachePrimaryKey = 'CACHE_KEY',
  CacheMatchedKey = 'CACHE_RESULT'
  CacheMatchedKey = 'CACHE_RESULT',
  CachePaths = 'CACHE_PATHS'
}

export enum Outputs {
53 src/distributions/base-distribution-prerelease.ts Normal file
@ -0,0 +1,53 @@
import * as tc from '@actions/tool-cache';

import semver from 'semver';

import BaseDistribution from './base-distribution';
import {NodeInputs} from './base-models';

export default abstract class BasePrereleaseNodejs extends BaseDistribution {
  protected abstract distribution: string;
  constructor(nodeInfo: NodeInputs) {
    super(nodeInfo);
  }

  protected findVersionInHostedToolCacheDirectory(): string {
    let toolPath = '';
    const localVersionPaths = tc
      .findAllVersions('node', this.nodeInfo.arch)
      .filter(i => {
        const prerelease = semver.prerelease(i);
        if (!prerelease) {
          return false;
        }

        return prerelease[0].includes(this.distribution);
      });
    localVersionPaths.sort(semver.rcompare);
    const localVersion = this.evaluateVersions(localVersionPaths);
    if (localVersion) {
      toolPath = tc.find('node', localVersion, this.nodeInfo.arch);
    }

    return toolPath;
  }

  protected validRange(versionSpec: string) {
    let range: string;
    const [raw, prerelease] = this.splitVersionSpec(versionSpec);
    const isValidVersion = semver.valid(raw);
    const rawVersion = (isValidVersion ? raw : semver.coerce(raw))!;

    if (prerelease !== this.distribution) {
      range = versionSpec;
    } else {
      range = `${semver.validRange(`^${rawVersion}-${this.distribution}`)}-0`;
    }

    return {range, options: {includePrerelease: !isValidVersion}};
  }

  protected splitVersionSpec(versionSpec: string) {
    return versionSpec.split(/-(.*)/s);
  }
}
295
src/distributions/base-distribution.ts
Normal file
295
src/distributions/base-distribution.ts
Normal file
@ -0,0 +1,295 @@
|
||||
import {v4 as uuidv4} from 'uuid';
|
||||
import * as tc from '@actions/tool-cache';
|
||||
import * as hc from '@actions/http-client';
|
||||
import * as core from '@actions/core';
|
||||
import * as io from '@actions/io';
|
||||
|
||||
import semver from 'semver';
|
||||
import * as assert from 'assert';
|
||||
|
||||
import * as path from 'path';
|
||||
import os from 'os';
|
||||
import fs from 'fs';
|
||||
|
||||
import {NodeInputs, INodeVersion, INodeVersionInfo} from './base-models';
|
||||
|
||||
export default abstract class BaseDistribution {
|
||||
protected httpClient: hc.HttpClient;
|
||||
protected osPlat = os.platform();
|
||||
|
||||
constructor(protected nodeInfo: NodeInputs) {
|
||||
this.httpClient = new hc.HttpClient('setup-node', [], {
|
||||
allowRetries: true,
|
||||
maxRetries: 3
|
||||
});
|
||||
}
|
||||
|
||||
protected abstract getDistributionUrl(): string;
|
||||
|
||||
public async setupNodeJs() {
|
||||
let nodeJsVersions: INodeVersion[] | undefined;
|
||||
if (this.nodeInfo.checkLatest) {
|
||||
const evaluatedVersion = await this.findVersionInDist(nodeJsVersions);
|
||||
this.nodeInfo.versionSpec = evaluatedVersion;
|
||||
}
|
||||
|
||||
let toolPath = this.findVersionInHostedToolCacheDirectory();
|
||||
if (toolPath) {
|
||||
core.info(`Found in cache @ ${toolPath}`);
|
||||
} else {
|
||||
const evaluatedVersion = await this.findVersionInDist(nodeJsVersions);
|
||||
const toolName = this.getNodejsDistInfo(evaluatedVersion);
|
||||
toolPath = await this.downloadNodejs(toolName);
|
||||
}
|
||||
|
||||
if (this.osPlat != 'win32') {
|
||||
toolPath = path.join(toolPath, 'bin');
|
||||
}
|
||||
|
||||
core.addPath(toolPath);
|
||||
}
|
||||
|
||||
protected async findVersionInDist(nodeJsVersions?: INodeVersion[]) {
|
||||
if (!nodeJsVersions) {
|
||||
nodeJsVersions = await this.getNodeJsVersions();
|
||||
}
|
||||
const versions = this.filterVersions(nodeJsVersions);
|
||||
const evaluatedVersion = this.evaluateVersions(versions);
|
||||
if (!evaluatedVersion) {
|
||||
throw new Error(
|
||||
`Unable to find Node version '${this.nodeInfo.versionSpec}' for platform ${this.osPlat} and architecture ${this.nodeInfo.arch}.`
|
||||
);
|
||||
}
|
||||
|
||||
return evaluatedVersion;
|
||||
}
|
||||
|
||||
protected evaluateVersions(versions: string[]): string {
|
||||
let version = '';
|
||||
|
||||
const {range, options} = this.validRange(this.nodeInfo.versionSpec);
|
||||
|
||||
core.debug(`evaluating ${versions.length} versions`);
|
||||
|
||||
for (const potential of versions) {
|
||||
const satisfied: boolean = semver.satisfies(potential, range, options);
|
||||
if (satisfied) {
|
||||
version = potential;
|
||||
break;
|
||||
}
|
||||
}
|
||||
|
||||
if (version) {
|
||||
core.debug(`matched: ${version}`);
|
||||
} else {
|
||||
core.debug('match not found');
|
||||
}
|
||||
|
||||
return version;
|
||||
}
|
||||
|
||||
protected findVersionInHostedToolCacheDirectory() {
|
||||
return tc.find(
|
||||
'node',
|
||||
this.nodeInfo.versionSpec,
|
||||
this.translateArchToDistUrl(this.nodeInfo.arch)
|
||||
);
|
||||
}
|
||||
|
||||
protected async getNodeJsVersions(): Promise<INodeVersion[]> {
|
||||
const initialUrl = this.getDistributionUrl();
|
||||
const dataUrl = `${initialUrl}/index.json`;
|
||||
|
||||
const response = await this.httpClient.getJson<INodeVersion[]>(dataUrl);
|
||||
return response.result || [];
|
||||
}
|
||||
|
||||
protected getNodejsDistInfo(version: string) {
|
||||
const osArch: string = this.translateArchToDistUrl(this.nodeInfo.arch);
|
||||
version = semver.clean(version) || '';
|
||||
const fileName: string =
|
||||
this.osPlat == 'win32'
|
||||
? `node-v${version}-win-${osArch}`
|
||||
: `node-v${version}-${this.osPlat}-${osArch}`;
|
||||
const urlFileName: string =
|
||||
this.osPlat == 'win32' ? `${fileName}.7z` : `${fileName}.tar.gz`;
|
||||
const initialUrl = this.getDistributionUrl();
|
||||
const url = `${initialUrl}/v${version}/${urlFileName}`;
|
||||
|
||||
return <INodeVersionInfo>{
|
||||
downloadUrl: url,
|
||||
resolvedVersion: version,
|
||||
arch: osArch,
|
||||
fileName: fileName
|
||||
};
|
||||
}
|
||||
|
||||
protected async downloadNodejs(info: INodeVersionInfo) {
|
||||
let downloadPath = '';
|
||||
core.info(
|
||||
`Acquiring ${info.resolvedVersion} - ${info.arch} from ${info.downloadUrl}`
|
||||
);
|
||||
try {
|
||||
downloadPath = await tc.downloadTool(info.downloadUrl);
|
||||
} catch (err) {
|
||||
if (
|
||||
err instanceof tc.HTTPError &&
|
||||
err.httpStatusCode == 404 &&
|
||||
this.osPlat == 'win32'
|
||||
) {
|
||||
return await this.acquireWindowsNodeFromFallbackLocation(
|
||||
info.resolvedVersion,
|
||||
info.arch
|
||||
);
|
||||
}
|
||||
|
||||
throw err;
|
||||
}
|
||||
|
||||
const toolPath = await this.extractArchive(downloadPath, info);
|
||||
core.info('Done');
|
||||
|
||||
return toolPath;
|
||||
}
|
||||
|
||||
protected validRange(versionSpec: string) {
|
||||
let options: semver.Options | undefined;
|
||||
const c = semver.clean(versionSpec) || '';
|
||||
const valid = semver.valid(c) ?? versionSpec;
|
||||
|
||||
return {range: valid, options};
|
||||
}
|
||||
|
||||
protected async acquireWindowsNodeFromFallbackLocation(
|
||||
version: string,
|
||||
arch: string = os.arch()
|
||||
): Promise<string> {
|
||||
const initialUrl = this.getDistributionUrl();
|
||||
const osArch: string = this.translateArchToDistUrl(arch);
|
||||
|
||||
// Create temporary folder to download to
|
||||
const tempDownloadFolder = `temp_${uuidv4()}`;
|
||||
const tempDirectory = process.env['RUNNER_TEMP'] || '';
|
||||
assert.ok(tempDirectory, 'Expected RUNNER_TEMP to be defined');
|
||||
const tempDir: string = path.join(tempDirectory, tempDownloadFolder);
|
||||
await io.mkdirP(tempDir);
|
||||
let exeUrl: string;
|
||||
let libUrl: string;
|
||||
try {
|
||||
exeUrl = `${initialUrl}/v${version}/win-${osArch}/node.exe`;
|
||||
libUrl = `${initialUrl}/v${version}/win-${osArch}/node.lib`;
|
||||
|
||||
core.info(`Downloading only node binary from ${exeUrl}`);
|
||||
|
||||
const exePath = await tc.downloadTool(exeUrl);
|
||||
await io.cp(exePath, path.join(tempDir, 'node.exe'));
|
||||
const libPath = await tc.downloadTool(libUrl);
|
||||
await io.cp(libPath, path.join(tempDir, 'node.lib'));
|
||||
} catch (err) {
|
||||
if (err instanceof tc.HTTPError && err.httpStatusCode == 404) {
|
||||
exeUrl = `${initialUrl}/v${version}/node.exe`;
|
||||
libUrl = `${initialUrl}/v${version}/node.lib`;
|
||||
|
||||
const exePath = await tc.downloadTool(exeUrl);
|
||||
await io.cp(exePath, path.join(tempDir, 'node.exe'));
|
||||
const libPath = await tc.downloadTool(libUrl);
|
||||
await io.cp(libPath, path.join(tempDir, 'node.lib'));
|
||||
} else {
|
||||
throw err;
|
||||
}
|
||||
}
|
||||
|
||||
const toolPath = await tc.cacheDir(tempDir, 'node', version, arch);
|
||||
|
||||
return toolPath;
|
||||
}
|
||||
|
||||
protected async extractArchive(
|
||||
downloadPath: string,
|
||||
info: INodeVersionInfo | null
|
||||
) {
|
||||
//
|
||||
// Extract
|
||||
//
|
||||
core.info('Extracting ...');
|
||||
let extPath: string;
|
||||
info = info || ({} as INodeVersionInfo); // satisfy compiler, never null when reaches here
|
||||
if (this.osPlat == 'win32') {
|
||||
const _7zPath = path.join(__dirname, '../..', 'externals', '7zr.exe');
|
||||
extPath = await tc.extract7z(downloadPath, undefined, _7zPath);
|
||||
// 7z extracts to folder matching file name
|
||||
const nestedPath = path.join(
|
||||
extPath,
|
||||
path.basename(info.fileName, '.7z')
|
||||
);
|
||||
if (fs.existsSync(nestedPath)) {
|
||||
extPath = nestedPath;
|
||||
}
|
||||
} else {
|
||||
extPath = await tc.extractTar(downloadPath, undefined, [
|
||||
'xz',
|
||||
'--strip',
|
||||
'1'
|
||||
]);
|
||||
}
|
||||
|
||||
//
|
||||
// Install into the local tool cache - node extracts with a root folder that matches the fileName downloaded
|
||||
//
|
||||
core.info('Adding to the cache ...');
|
||||
const toolPath = await tc.cacheDir(
|
||||
extPath,
|
||||
'node',
|
||||
info.resolvedVersion,
|
||||
info.arch
|
||||
);
|
||||
|
||||
return toolPath;
|
||||
}
|
||||
|
||||
protected getDistFileName(): string {
|
||||
const osArch: string = this.translateArchToDistUrl(this.nodeInfo.arch);
|
||||
|
||||
// node offers a json list of versions
|
||||
let dataFileName: string;
|
||||
switch (this.osPlat) {
|
||||
case 'linux':
|
||||
dataFileName = `linux-${osArch}`;
|
||||
break;
|
||||
case 'darwin':
|
||||
dataFileName = `osx-${osArch}-tar`;
|
||||
break;
|
||||
case 'win32':
|
||||
dataFileName = `win-${osArch}-exe`;
|
||||
break;
|
||||
default:
|
||||
throw new Error(`Unexpected OS '${this.osPlat}'`);
|
||||
}
|
||||
|
||||
return dataFileName;
|
||||
}
|
||||
|
||||
protected filterVersions(nodeJsVersions: INodeVersion[]) {
|
||||
const versions: string[] = [];
|
||||
|
||||
const dataFileName = this.getDistFileName();
|
||||
|
||||
nodeJsVersions.forEach((nodeVersion: INodeVersion) => {
|
||||
// ensure this version supports your os and platform
|
||||
if (nodeVersion.files.indexOf(dataFileName) >= 0) {
|
||||
versions.push(nodeVersion.version);
|
||||
}
|
||||
});
|
||||
|
||||
return versions.sort(semver.rcompare);
|
||||
}
|
||||
|
||||
protected translateArchToDistUrl(arch: string): string {
|
||||
switch (arch) {
|
||||
case 'arm':
|
||||
return 'armv7l';
|
||||
default:
|
||||
return arch;
|
||||
}
|
||||
}
|
||||
}
|
19 src/distributions/base-models.ts Normal file
@ -0,0 +1,19 @@
export interface NodeInputs {
  versionSpec: string;
  arch: string;
  auth?: string;
  checkLatest: boolean;
  stable: boolean;
}

export interface INodeVersionInfo {
  downloadUrl: string;
  resolvedVersion: string;
  arch: string;
  fileName: string;
}

export interface INodeVersion {
  version: string;
  files: string[];
}
31 src/distributions/installer-factory.ts Normal file
@ -0,0 +1,31 @@
import BaseDistribution from './base-distribution';
import {NodeInputs} from './base-models';
import NightlyNodejs from './nightly/nightly_builds';
import OfficialBuilds from './official_builds/official_builds';
import RcBuild from './rc/rc_builds';
import CanaryBuild from './v8-canary/canary_builds';

enum Distributions {
  DEFAULT = '',
  CANARY = 'v8-canary',
  NIGHTLY = 'nightly',
  RC = 'rc'
}

export function getNodejsDistribution(
  installerOptions: NodeInputs
): BaseDistribution {
  const versionSpec = installerOptions.versionSpec;
  let distribution: BaseDistribution;
  if (versionSpec.includes(Distributions.NIGHTLY)) {
    distribution = new NightlyNodejs(installerOptions);
  } else if (versionSpec.includes(Distributions.CANARY)) {
    distribution = new CanaryBuild(installerOptions);
  } else if (versionSpec.includes(Distributions.RC)) {
    distribution = new RcBuild(installerOptions);
  } else {
    distribution = new OfficialBuilds(installerOptions);
  }

  return distribution;
}
13 src/distributions/nightly/nightly_builds.ts Normal file
@ -0,0 +1,13 @@
import BasePrereleaseNodejs from '../base-distribution-prerelease';
import {NodeInputs} from '../base-models';

export default class NightlyNodejs extends BasePrereleaseNodejs {
  protected distribution = 'nightly';
  constructor(nodeInfo: NodeInputs) {
    super(nodeInfo);
  }

  protected getDistributionUrl(): string {
    return 'https://nodejs.org/download/nightly';
  }
}
258
src/distributions/official_builds/official_builds.ts
Normal file
258
src/distributions/official_builds/official_builds.ts
Normal file
@ -0,0 +1,258 @@
|
||||
import * as core from '@actions/core';
|
||||
import * as tc from '@actions/tool-cache';
|
||||
import path from 'path';
|
||||
|
||||
import BaseDistribution from '../base-distribution';
|
||||
import {NodeInputs, INodeVersion, INodeVersionInfo} from '../base-models';
|
||||
|
||||
interface INodeRelease extends tc.IToolRelease {
|
||||
lts?: string;
|
||||
}
|
||||
|
||||
export default class OfficialBuilds extends BaseDistribution {
|
||||
constructor(nodeInfo: NodeInputs) {
|
||||
super(nodeInfo);
|
||||
}
|
||||
|
||||
public async setupNodeJs() {
|
||||
let manifest: tc.IToolRelease[] | undefined;
|
||||
let nodeJsVersions: INodeVersion[] | undefined;
|
||||
const osArch = this.translateArchToDistUrl(this.nodeInfo.arch);
|
||||
if (this.isLtsAlias(this.nodeInfo.versionSpec)) {
|
||||
core.info('Attempt to resolve LTS alias from manifest...');
|
||||
|
||||
// No try-catch since it's not possible to resolve LTS alias without manifest
|
||||
manifest = await this.getManifest();
|
||||
|
||||
this.nodeInfo.versionSpec = this.resolveLtsAliasFromManifest(
|
||||
this.nodeInfo.versionSpec,
|
||||
this.nodeInfo.stable,
|
||||
manifest
|
||||
);
|
||||
}
|
||||
|
||||
if (this.isLatestSyntax(this.nodeInfo.versionSpec)) {
|
||||
nodeJsVersions = await this.getNodeJsVersions();
|
||||
const versions = this.filterVersions(nodeJsVersions);
|
||||
this.nodeInfo.versionSpec = this.evaluateVersions(versions);
|
||||
|
||||
core.info('getting latest node version...');
|
||||
}
|
||||
|
||||
if (this.nodeInfo.checkLatest) {
|
||||
core.info('Attempt to resolve the latest version from manifest...');
|
||||
const resolvedVersion = await this.resolveVersionFromManifest(
|
||||
this.nodeInfo.versionSpec,
|
||||
this.nodeInfo.stable,
|
||||
osArch,
|
||||
manifest
|
||||
);
|
||||
if (resolvedVersion) {
|
||||
this.nodeInfo.versionSpec = resolvedVersion;
|
||||
core.info(`Resolved as '${resolvedVersion}'`);
|
||||
} else {
|
||||
core.info(
|
||||
`Failed to resolve version ${this.nodeInfo.versionSpec} from manifest`
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
let toolPath = this.findVersionInHostedToolCacheDirectory();
|
||||
|
||||
if (toolPath) {
|
||||
core.info(`Found in cache @ ${toolPath}`);
|
||||
} else {
|
||||
let downloadPath = '';
|
||||
try {
|
||||
core.info(`Attempting to download ${this.nodeInfo.versionSpec}...`);
|
||||
|
||||
const versionInfo = await this.getInfoFromManifest(
|
||||
this.nodeInfo.versionSpec,
|
||||
this.nodeInfo.stable,
|
||||
osArch,
|
||||
manifest
|
||||
);
|
||||
if (versionInfo) {
|
||||
core.info(
|
||||
`Acquiring ${versionInfo.resolvedVersion} - ${versionInfo.arch} from ${versionInfo.downloadUrl}`
|
||||
);
|
||||
downloadPath = await tc.downloadTool(
|
||||
versionInfo.downloadUrl,
|
||||
undefined,
|
||||
this.nodeInfo.auth
|
||||
);
|
||||
|
||||
if (downloadPath) {
|
||||
toolPath = await this.extractArchive(downloadPath, versionInfo);
|
||||
}
|
||||
} else {
|
||||
core.info(
|
||||
'Not found in manifest. Falling back to download directly from Node'
|
||||
);
|
||||
}
|
||||
} catch (err) {
|
||||
// Rate limit?
|
||||
if (
|
||||
err instanceof tc.HTTPError &&
|
||||
(err.httpStatusCode === 403 || err.httpStatusCode === 429)
|
||||
) {
|
||||
core.info(
|
||||
`Received HTTP status code ${err.httpStatusCode}. This usually indicates the rate limit has been exceeded`
|
||||
);
|
||||
} else {
|
||||
core.info(err.message);
|
||||
}
|
||||
core.debug(err.stack);
|
||||
core.info('Falling back to download directly from Node');
|
||||
}
|
||||
|
||||
if (!toolPath) {
|
||||
const nodeJsVersions = await this.getNodeJsVersions();
|
||||
const versions = this.filterVersions(nodeJsVersions);
|
||||
const evaluatedVersion = this.evaluateVersions(versions);
|
||||
if (!evaluatedVersion) {
|
||||
throw new Error(
|
||||
`Unable to find Node version '${this.nodeInfo.versionSpec}' for platform ${this.osPlat} and architecture ${this.nodeInfo.arch}.`
|
||||
);
|
||||
}
|
||||
const toolName = this.getNodejsDistInfo(evaluatedVersion);
|
||||
toolPath = await this.downloadNodejs(toolName);
|
||||
}
|
||||
}
|
||||
|
||||
if (this.osPlat != 'win32') {
|
||||
toolPath = path.join(toolPath, 'bin');
|
||||
}
|
||||
|
||||
core.addPath(toolPath);
|
||||
}
|
||||
|
||||
protected evaluateVersions(versions: string[]): string {
|
||||
let version = '';
|
||||
|
||||
if (this.isLatestSyntax(this.nodeInfo.versionSpec)) {
|
||||
core.info(`getting latest node version...`);
|
||||
return versions[0];
|
||||
}
|
||||
|
||||
version = super.evaluateVersions(versions);
|
||||
|
||||
return version;
|
||||
}
|
||||
|
||||
protected getDistributionUrl(): string {
|
||||
return `https://nodejs.org/dist`;
|
||||
}
|
||||
|
||||
private getManifest(): Promise<tc.IToolRelease[]> {
|
||||
core.debug('Getting manifest from actions/node-versions@main');
|
||||
return tc.getManifestFromRepo(
|
||||
'actions',
|
||||
'node-versions',
|
||||
this.nodeInfo.auth,
|
||||
'main'
|
||||
);
|
||||
}
|
||||
|
||||
private resolveLtsAliasFromManifest(
|
||||
versionSpec: string,
|
||||
stable: boolean,
|
||||
manifest: INodeRelease[]
|
||||
): string {
|
||||
const alias = versionSpec.split('lts/')[1]?.toLowerCase();
|
||||
|
||||
if (!alias) {
|
||||
throw new Error(
|
||||
`Unable to parse LTS alias for Node version '${versionSpec}'`
|
||||
);
|
||||
}
|
||||
|
||||
core.debug(`LTS alias '${alias}' for Node version '${versionSpec}'`);
|
||||
|
||||
// Supported formats are `lts/<alias>`, `lts/*`, and `lts/-n`. Where asterisk means highest possible LTS and -n means the nth-highest.
|
||||
const n = Number(alias);
|
||||
const aliases = Object.fromEntries(
|
||||
manifest
|
||||
.filter(x => x.lts && x.stable === stable)
|
||||
.map(x => [x.lts!.toLowerCase(), x])
|
||||
.reverse()
|
||||
);
|
||||
const numbered = Object.values(aliases);
|
||||
const release =
|
||||
alias === '*'
|
||||
? numbered[numbered.length - 1]
|
||||
: n < 0
|
||||
? numbered[numbered.length - 1 + n]
|
||||
: aliases[alias];
|
||||
|
||||
if (!release) {
|
||||
throw new Error(
|
||||
`Unable to find LTS release '${alias}' for Node version '${versionSpec}'.`
|
||||
);
|
||||
}
|
||||
|
||||
core.debug(
|
||||
`Found LTS release '${release.version}' for Node version '${versionSpec}'`
|
||||
);
|
||||
|
||||
return release.version.split('.')[0];
|
||||
}
|
||||
|
||||
private async resolveVersionFromManifest(
|
||||
versionSpec: string,
|
||||
stable: boolean,
|
||||
osArch: string,
|
||||
manifest: tc.IToolRelease[] | undefined
|
||||
): Promise<string | undefined> {
|
||||
try {
|
||||
const info = await this.getInfoFromManifest(
|
||||
versionSpec,
|
||||
stable,
|
||||
osArch,
|
||||
manifest
|
||||
);
|
||||
return info?.resolvedVersion;
|
||||
} catch (err) {
|
||||
core.info('Unable to resolve version from manifest...');
|
||||
core.debug(err.message);
|
||||
}
|
||||
}
|
||||
|
||||
private async getInfoFromManifest(
|
||||
versionSpec: string,
|
||||
stable: boolean,
|
||||
osArch: string,
|
||||
manifest: tc.IToolRelease[] | undefined
|
||||
): Promise<INodeVersionInfo | null> {
|
||||
let info: INodeVersionInfo | null = null;
|
||||
if (!manifest) {
|
||||
core.debug('No manifest cached');
|
||||
manifest = await this.getManifest();
|
||||
}
|
||||
|
||||
const rel = await tc.findFromManifest(
|
||||
versionSpec,
|
||||
stable,
|
||||
manifest,
|
||||
osArch
|
||||
);
|
||||
|
||||
if (rel && rel.files.length > 0) {
|
||||
info = <INodeVersionInfo>{};
|
||||
info.resolvedVersion = rel.version;
|
||||
info.arch = rel.files[0].arch;
|
||||
info.downloadUrl = rel.files[0].download_url;
|
||||
info.fileName = rel.files[0].filename;
|
||||
}
|
||||
|
||||
return info;
|
||||
}
|
||||
|
||||
private isLtsAlias(versionSpec: string): boolean {
|
||||
return versionSpec.startsWith('lts/');
|
||||
}
|
||||
|
||||
private isLatestSyntax(versionSpec): boolean {
|
||||
return ['current', 'latest', 'node'].includes(versionSpec);
|
||||
}
|
||||
}
|
12 src/distributions/rc/rc_builds.ts Normal file
@ -0,0 +1,12 @@
import BaseDistribution from '../base-distribution';
import {NodeInputs} from '../base-models';

export default class RcBuild extends BaseDistribution {
  constructor(nodeInfo: NodeInputs) {
    super(nodeInfo);
  }

  getDistributionUrl(): string {
    return 'https://nodejs.org/download/rc';
  }
}
13 src/distributions/v8-canary/canary_builds.ts Normal file
@ -0,0 +1,13 @@
import BasePrereleaseNodejs from '../base-distribution-prerelease';
import {NodeInputs} from '../base-models';

export default class CanaryBuild extends BasePrereleaseNodejs {
  protected distribution = 'v8-canary';
  constructor(nodeInfo: NodeInputs) {
    super(nodeInfo);
  }

  protected getDistributionUrl(): string {
    return 'https://nodejs.org/download/v8-canary';
  }
}
522
src/installer.ts
522
src/installer.ts
@ -1,522 +0,0 @@
|
||||
import os = require('os');
|
||||
import * as assert from 'assert';
|
||||
import * as core from '@actions/core';
|
||||
import * as hc from '@actions/http-client';
|
||||
import * as io from '@actions/io';
|
||||
import * as tc from '@actions/tool-cache';
|
||||
import * as path from 'path';
|
||||
import * as semver from 'semver';
|
||||
import fs = require('fs');
|
||||
|
||||
//
|
||||
// Node versions interface
|
||||
// see https://nodejs.org/dist/index.json
|
||||
//
|
||||
export interface INodeVersion {
|
||||
version: string;
|
||||
files: string[];
|
||||
}
|
||||
|
||||
interface INodeVersionInfo {
|
||||
downloadUrl: string;
|
||||
resolvedVersion: string;
|
||||
arch: string;
|
||||
fileName: string;
|
||||
}
|
||||
|
||||
interface INodeRelease extends tc.IToolRelease {
|
||||
lts?: string;
|
||||
}
|
||||
|
||||
export async function getNode(
|
||||
versionSpec: string,
|
||||
stable: boolean,
|
||||
checkLatest: boolean,
|
||||
auth: string | undefined,
|
||||
arch: string = os.arch()
|
||||
) {
|
||||
// Store manifest data to avoid multiple calls
|
||||
let manifest: INodeRelease[] | undefined;
|
||||
let nodeVersions: INodeVersion[] | undefined;
|
||||
let osPlat: string = os.platform();
|
||||
let osArch: string = translateArchToDistUrl(arch);
|
||||
|
||||
if (isLtsAlias(versionSpec)) {
|
||||
core.info('Attempt to resolve LTS alias from manifest...');
|
||||
|
||||
// No try-catch since it's not possible to resolve LTS alias without manifest
|
||||
manifest = await getManifest(auth);
|
||||
|
||||
versionSpec = resolveLtsAliasFromManifest(versionSpec, stable, manifest);
|
||||
}
|
||||
|
||||
if (isLatestSyntax(versionSpec)) {
|
||||
nodeVersions = await getVersionsFromDist();
|
||||
versionSpec = await queryDistForMatch(versionSpec, arch, nodeVersions);
|
||||
core.info(`getting latest node version...`);
|
||||
}
|
||||
|
||||
if (checkLatest) {
|
||||
core.info('Attempt to resolve the latest version from manifest...');
|
||||
const resolvedVersion = await resolveVersionFromManifest(
|
||||
versionSpec,
|
||||
stable,
|
||||
auth,
|
||||
osArch,
|
||||
manifest
|
||||
);
|
||||
if (resolvedVersion) {
|
||||
versionSpec = resolvedVersion;
|
||||
core.info(`Resolved as '${versionSpec}'`);
|
||||
} else {
|
||||
core.info(`Failed to resolve version ${versionSpec} from manifest`);
|
||||
}
|
||||
}
|
||||
|
||||
// check cache
|
||||
let toolPath: string;
|
||||
toolPath = tc.find('node', versionSpec, osArch);
|
||||
|
||||
// If not found in cache, download
|
||||
if (toolPath) {
|
||||
core.info(`Found in cache @ ${toolPath}`);
|
||||
} else {
|
||||
core.info(`Attempting to download ${versionSpec}...`);
|
||||
let downloadPath = '';
|
||||
let info: INodeVersionInfo | null = null;
|
||||
|
||||
//
|
||||
// Try download from internal distribution (popular versions only)
|
||||
//
|
||||
try {
|
||||
info = await getInfoFromManifest(
|
||||
versionSpec,
|
||||
stable,
|
||||
auth,
|
||||
osArch,
|
||||
manifest
|
||||
);
|
||||
if (info) {
|
||||
core.info(
|
||||
`Acquiring ${info.resolvedVersion} - ${info.arch} from ${info.downloadUrl}`
|
||||
);
|
||||
downloadPath = await tc.downloadTool(info.downloadUrl, undefined, auth);
|
||||
} else {
|
||||
core.info(
|
||||
'Not found in manifest. Falling back to download directly from Node'
|
||||
);
|
||||
}
|
||||
} catch (err) {
|
||||
// Rate limit?
|
||||
if (
|
||||
err instanceof tc.HTTPError &&
|
||||
(err.httpStatusCode === 403 || err.httpStatusCode === 429)
|
||||
) {
|
||||
core.info(
|
||||
`Received HTTP status code ${err.httpStatusCode}. This usually indicates the rate limit has been exceeded`
|
||||
);
|
||||
} else {
|
||||
core.info(err.message);
|
||||
}
|
||||
core.debug(err.stack);
|
||||
core.info('Falling back to download directly from Node');
|
||||
}
|
||||
|
||||
//
|
||||
// Download from nodejs.org
|
||||
//
|
||||
if (!downloadPath) {
|
||||
info = await getInfoFromDist(versionSpec, arch, nodeVersions);
|
||||
if (!info) {
|
||||
throw new Error(
|
||||
`Unable to find Node version '${versionSpec}' for platform ${osPlat} and architecture ${osArch}.`
|
||||
);
|
||||
}
|
||||
|
||||
core.info(
|
||||
`Acquiring ${info.resolvedVersion} - ${info.arch} from ${info.downloadUrl}`
|
||||
);
|
||||
try {
|
||||
downloadPath = await tc.downloadTool(info.downloadUrl);
|
||||
} catch (err) {
|
||||
if (err instanceof tc.HTTPError && err.httpStatusCode == 404) {
|
||||
return await acquireNodeFromFallbackLocation(
|
||||
info.resolvedVersion,
|
||||
info.arch
|
||||
);
|
||||
}
|
||||
|
||||
throw err;
|
||||
}
|
||||
}
|
||||
|
||||
//
|
||||
// Extract
|
||||
//
|
||||
core.info('Extracting ...');
|
||||
let extPath: string;
|
||||
info = info || ({} as INodeVersionInfo); // satisfy compiler, never null when reaches here
|
||||
if (osPlat == 'win32') {
|
||||
let _7zPath = path.join(__dirname, '../..', 'externals', '7zr.exe');
|
||||
extPath = await tc.extract7z(downloadPath, undefined, _7zPath);
|
||||
// 7z extracts to folder matching file name
|
||||
let nestedPath = path.join(extPath, path.basename(info.fileName, '.7z'));
|
||||
if (fs.existsSync(nestedPath)) {
|
||||
extPath = nestedPath;
|
||||
}
|
||||
} else {
|
||||
extPath = await tc.extractTar(downloadPath, undefined, [
|
||||
'xz',
|
||||
'--strip',
|
||||
'1'
|
||||
]);
|
||||
}
|
||||
|
||||
//
|
||||
// Install into the local tool cache - node extracts with a root folder that matches the fileName downloaded
|
||||
//
|
||||
core.info('Adding to the cache ...');
|
||||
toolPath = await tc.cacheDir(
|
||||
extPath,
|
||||
'node',
|
||||
info.resolvedVersion,
|
||||
info.arch
|
||||
);
|
||||
core.info('Done');
|
||||
}
|
||||
|
||||
//
|
||||
// a tool installer initimately knows details about the layout of that tool
|
||||
// for example, node binary is in the bin folder after the extract on Mac/Linux.
|
||||
// layouts could change by version, by platform etc... but that's the tool installers job
|
||||
//
|
||||
if (osPlat != 'win32') {
|
||||
toolPath = path.join(toolPath, 'bin');
|
||||
}
|
||||
|
||||
//
|
||||
// prepend the tools path. instructs the agent to prepend for future tasks
|
||||
core.addPath(toolPath);
|
||||
}
|
||||
|
||||
function isLtsAlias(versionSpec: string): boolean {
  return versionSpec.startsWith('lts/');
}

function getManifest(auth: string | undefined): Promise<tc.IToolRelease[]> {
  core.debug('Getting manifest from actions/node-versions@main');
  return tc.getManifestFromRepo('actions', 'node-versions', auth, 'main');
}

function resolveLtsAliasFromManifest(
  versionSpec: string,
  stable: boolean,
  manifest: INodeRelease[]
): string {
  const alias = versionSpec.split('lts/')[1]?.toLowerCase();

  if (!alias) {
    throw new Error(
      `Unable to parse LTS alias for Node version '${versionSpec}'`
    );
  }

  core.debug(`LTS alias '${alias}' for Node version '${versionSpec}'`);

  // Supported formats are `lts/<alias>`, `lts/*`, and `lts/-n`. Where asterisk means highest possible LTS and -n means the nth-highest.
  const n = Number(alias);
  const aliases = Object.fromEntries(
    manifest
      .filter(x => x.lts && x.stable === stable)
      .map(x => [x.lts!.toLowerCase(), x])
      .reverse()
  );
  const numbered = Object.values(aliases);
  const release =
    alias === '*'
      ? numbered[numbered.length - 1]
      : n < 0
      ? numbered[numbered.length - 1 + n]
      : aliases[alias];

  if (!release) {
    throw new Error(
      `Unable to find LTS release '${alias}' for Node version '${versionSpec}'.`
    );
  }

  core.debug(
    `Found LTS release '${release.version}' for Node version '${versionSpec}'`
  );

  return release.version.split('.')[0];
}

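For orientation, the alias forms described in the comment above (`lts/<codename>`, `lts/*`, `lts/-n`) resolve roughly as in the following self-contained sketch; the manifest entries and helper below are illustrative stand-ins, not real data or code from actions/node-versions:

// Hypothetical, simplified model of the manifest entries used above.
interface ToyRelease {
  version: string;
  stable: boolean;
  lts?: string;
}

// Manifest order mirrors actions/node-versions: newest release first.
const toyManifest: ToyRelease[] = [
  {version: '18.12.1', stable: true, lts: 'Hydrogen'},
  {version: '16.18.1', stable: true, lts: 'Gallium'},
  {version: '14.21.1', stable: true, lts: 'Fermium'}
];

// Same resolution scheme as resolveLtsAliasFromManifest, reduced to its core:
// resolveToyLts('hydrogen') -> '18', resolveToyLts('*') -> '18', resolveToyLts('-1') -> '16'.
function resolveToyLts(alias: string): string | undefined {
  const n = Number(alias);
  const byName = Object.fromEntries(
    toyManifest
      .filter(x => x.lts && x.stable)
      .map(x => [x.lts!.toLowerCase(), x])
      .reverse()
  );
  const ordered = Object.values(byName); // oldest LTS line first
  const release =
    alias === '*'
      ? ordered[ordered.length - 1]
      : n < 0
      ? ordered[ordered.length - 1 + n]
      : byName[alias];
  return release?.version.split('.')[0];
}
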
async function getInfoFromManifest(
  versionSpec: string,
  stable: boolean,
  auth: string | undefined,
  osArch: string = translateArchToDistUrl(os.arch()),
  manifest: tc.IToolRelease[] | undefined
): Promise<INodeVersionInfo | null> {
  let info: INodeVersionInfo | null = null;
  if (!manifest) {
    core.debug('No manifest cached');
    manifest = await getManifest(auth);
  }

  const rel = await tc.findFromManifest(versionSpec, stable, manifest, osArch);

  if (rel && rel.files.length > 0) {
    info = <INodeVersionInfo>{};
    info.resolvedVersion = rel.version;
    info.arch = rel.files[0].arch;
    info.downloadUrl = rel.files[0].download_url;
    info.fileName = rel.files[0].filename;
  }

  return info;
}

async function getInfoFromDist(
  versionSpec: string,
  arch: string = os.arch(),
  nodeVersions?: INodeVersion[]
): Promise<INodeVersionInfo | null> {
  let osPlat: string = os.platform();
  let osArch: string = translateArchToDistUrl(arch);

  let version: string = await queryDistForMatch(
    versionSpec,
    arch,
    nodeVersions
  );

  if (!version) {
    return null;
  }

  //
  // Download - a tool installer intimately knows how to get the tool (and construct urls)
  //
  version = semver.clean(version) || '';
  let fileName: string =
    osPlat == 'win32'
      ? `node-v${version}-win-${osArch}`
      : `node-v${version}-${osPlat}-${osArch}`;
  let urlFileName: string =
    osPlat == 'win32' ? `${fileName}.7z` : `${fileName}.tar.gz`;
  let url = `https://nodejs.org/dist/v${version}/${urlFileName}`;

  return <INodeVersionInfo>{
    downloadUrl: url,
    resolvedVersion: version,
    arch: arch,
    fileName: fileName
  };
}

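As a concrete illustration of the URL shape built above (the sample version and platform are arbitrary; only the pattern matters):

// Illustrative only: what the pieces above evaluate to for '16.18.1' on linux/x64.
const sampleVersion = '16.18.1';
const samplePlat = 'linux';
const sampleArch = 'x64';
const sampleFileName = `node-v${sampleVersion}-${samplePlat}-${sampleArch}`; // node-v16.18.1-linux-x64
const sampleUrl = `https://nodejs.org/dist/v${sampleVersion}/${sampleFileName}.tar.gz`;
// => https://nodejs.org/dist/v16.18.1/node-v16.18.1-linux-x64.tar.gz
// On win32 the file name would be node-v16.18.1-win-x64 and the extension .7z.
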
async function resolveVersionFromManifest(
  versionSpec: string,
  stable: boolean,
  auth: string | undefined,
  osArch: string = translateArchToDistUrl(os.arch()),
  manifest: tc.IToolRelease[] | undefined
): Promise<string | undefined> {
  try {
    const info = await getInfoFromManifest(
      versionSpec,
      stable,
      auth,
      osArch,
      manifest
    );
    return info?.resolvedVersion;
  } catch (err) {
    core.info('Unable to resolve version from manifest...');
    core.debug(err.message);
  }
}

// TODO - should we just export this from @actions/tool-cache? Lifted directly from there
function evaluateVersions(versions: string[], versionSpec: string): string {
  let version = '';
  core.debug(`evaluating ${versions.length} versions`);
  versions = versions.sort((a, b) => {
    if (semver.gt(a, b)) {
      return 1;
    }
    return -1;
  });
  for (let i = versions.length - 1; i >= 0; i--) {
    const potential: string = versions[i];
    const satisfied: boolean = semver.satisfies(potential, versionSpec);
    if (satisfied) {
      version = potential;
      break;
    }
  }

  if (version) {
    core.debug(`matched: ${version}`);
  } else {
    core.debug('match not found');
  }

  return version;
}

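A rough standalone sketch of the selection rule this helper implements (pick the highest candidate satisfying the range), assuming the same `semver` package installer.ts already imports; the candidate list is made up:

import * as semver from 'semver';

// Hypothetical check: sort ascending, walk from the top, take the first match.
const candidates = ['14.21.1', '16.18.0', '16.18.1', '18.12.1'];
const spec = '16.x';
const best = candidates
  .sort((a, b) => (semver.gt(a, b) ? 1 : -1))
  .reverse()
  .find(v => semver.satisfies(v, spec)); // => '16.18.1'
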
async function queryDistForMatch(
  versionSpec: string,
  arch: string = os.arch(),
  nodeVersions?: INodeVersion[]
): Promise<string> {
  let osPlat: string = os.platform();
  let osArch: string = translateArchToDistUrl(arch);

  // node offers a json list of versions
  let dataFileName: string;
  switch (osPlat) {
    case 'linux':
      dataFileName = `linux-${osArch}`;
      break;
    case 'darwin':
      dataFileName = `osx-${osArch}-tar`;
      break;
    case 'win32':
      dataFileName = `win-${osArch}-exe`;
      break;
    default:
      throw new Error(`Unexpected OS '${osPlat}'`);
  }

  if (!nodeVersions) {
    core.debug('No dist manifest cached');
    nodeVersions = await getVersionsFromDist();
  }

  let versions: string[] = [];

  if (isLatestSyntax(versionSpec)) {
    core.info(`getting latest node version...`);
    return nodeVersions[0].version;
  }

  nodeVersions.forEach((nodeVersion: INodeVersion) => {
    // ensure this version supports your os and platform
    if (nodeVersion.files.indexOf(dataFileName) >= 0) {
      versions.push(nodeVersion.version);
    }
  });

  // get the latest version that matches the version spec
  let version: string = evaluateVersions(versions, versionSpec);
  return version;
}

export async function getVersionsFromDist(): Promise<INodeVersion[]> {
  let dataUrl = 'https://nodejs.org/dist/index.json';
  let httpClient = new hc.HttpClient('setup-node', [], {
    allowRetries: true,
    maxRetries: 3
  });
  let response = await httpClient.getJson<INodeVersion[]>(dataUrl);
  return response.result || [];
}

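For context, entries in https://nodejs.org/dist/index.json look roughly like the trimmed sample below (field list shortened for illustration); the `files` array is what queryDistForMatch checks `dataFileName` (e.g. 'linux-x64', 'osx-x64-tar', 'win-x64-exe') against:

// Trimmed, illustrative shape of one index.json entry.
const sampleDistEntry = {
  version: 'v18.12.1',
  files: ['linux-arm64', 'linux-x64', 'osx-arm64-tar', 'osx-x64-tar', 'win-x64-7z', 'win-x64-exe']
};
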
// For non LTS versions of Node, the files we need (for Windows) are sometimes located
// in a different folder than they normally are for other versions.
// Normally the format is similar to: https://nodejs.org/dist/v5.10.1/node-v5.10.1-win-x64.7z
// In this case, there will be two files located at:
//   /dist/v5.10.1/win-x64/node.exe
//   /dist/v5.10.1/win-x64/node.lib
// If this is not the structure, there may also be two files located at:
//   /dist/v0.12.18/node.exe
//   /dist/v0.12.18/node.lib
// This method attempts to download and cache the resources from these alternative locations.
// Note also that the files are normally zipped but in this case they are just an exe
// and lib file in a folder, not zipped.
async function acquireNodeFromFallbackLocation(
  version: string,
  arch: string = os.arch()
): Promise<string> {
  let osPlat: string = os.platform();
  let osArch: string = translateArchToDistUrl(arch);

  // Create temporary folder to download in to
  const tempDownloadFolder: string =
    'temp_' + Math.floor(Math.random() * 2000000000);
  const tempDirectory = process.env['RUNNER_TEMP'] || '';
  assert.ok(tempDirectory, 'Expected RUNNER_TEMP to be defined');
  const tempDir: string = path.join(tempDirectory, tempDownloadFolder);
  await io.mkdirP(tempDir);
  let exeUrl: string;
  let libUrl: string;
  try {
    exeUrl = `https://nodejs.org/dist/v${version}/win-${osArch}/node.exe`;
    libUrl = `https://nodejs.org/dist/v${version}/win-${osArch}/node.lib`;

    core.info(`Downloading only node binary from ${exeUrl}`);

    const exePath = await tc.downloadTool(exeUrl);
    await io.cp(exePath, path.join(tempDir, 'node.exe'));
    const libPath = await tc.downloadTool(libUrl);
    await io.cp(libPath, path.join(tempDir, 'node.lib'));
  } catch (err) {
    if (err instanceof tc.HTTPError && err.httpStatusCode == 404) {
      exeUrl = `https://nodejs.org/dist/v${version}/node.exe`;
      libUrl = `https://nodejs.org/dist/v${version}/node.lib`;

      const exePath = await tc.downloadTool(exeUrl);
      await io.cp(exePath, path.join(tempDir, 'node.exe'));
      const libPath = await tc.downloadTool(libUrl);
      await io.cp(libPath, path.join(tempDir, 'node.lib'));
    } else {
      throw err;
    }
  }
  let toolPath = await tc.cacheDir(tempDir, 'node', version, arch);
  core.addPath(toolPath);
  return toolPath;
}

// os.arch does not always match the relative download url, e.g.
// os.arch == 'arm' != node-v12.13.1-linux-armv7l.tar.gz
// All other currently supported architectures match, e.g.:
// os.arch = arm64 => https://nodejs.org/dist/v{VERSION}/node-v{VERSION}-{OS}-arm64.tar.gz
// os.arch = x64 => https://nodejs.org/dist/v{VERSION}/node-v{VERSION}-{OS}-x64.tar.gz
function translateArchToDistUrl(arch: string): string {
  switch (arch) {
    case 'arm':
      return 'armv7l';
    default:
      return arch;
  }
}

export function parseNodeVersionFile(contents: string): string {
  let nodeVersion: string | undefined;

  // Try parsing the file as an NPM `package.json` file.
  try {
    nodeVersion = JSON.parse(contents).volta?.node;
    if (!nodeVersion) nodeVersion = JSON.parse(contents).engines?.node;
  } catch {
    core.info('Node version file is not JSON file');
  }

  if (!nodeVersion) {
    const found = contents.match(/^(?:nodejs\s+)?v?(?<version>[^\s]+)$/m);
    nodeVersion = found?.groups?.version;
  }

  // In the case of an unknown format,
  // return as is and evaluate the version separately.
  if (!nodeVersion) nodeVersion = contents.trim();

  return nodeVersion as string;
}

function isLatestSyntax(versionSpec): boolean {
  return ['current', 'latest', 'node'].includes(versionSpec);
}
69
src/main.ts
@ -1,12 +1,14 @@
import * as core from '@actions/core';
import * as exec from '@actions/exec';
import * as installer from './installer';

import fs from 'fs';
import os from 'os';

import * as auth from './authutil';
import * as path from 'path';
import {restoreCache} from './cache-restore';
import {isGhes, isCacheFeatureAvailable} from './cache-utils';
import os = require('os');
import {isCacheFeatureAvailable} from './cache-utils';
import {getNodejsDistribution} from './distributions/installer-factory';
import {parseNodeVersionFile, printEnvDetailsAndSetOutput} from './util';

export async function run() {
  try {
@ -14,7 +16,7 @@ export async function run() {
    // Version is optional. If supplied, install / use from the tool cache
    // If not supplied then task is still used to setup proxy, auth, etc...
    //
    let version = resolveVersionInput();
    const version = resolveVersionInput();

    let arch = core.getInput('architecture');
    const cache = core.getInput('cache');
@ -32,12 +34,21 @@ export async function run() {
    }

    if (version) {
      let token = core.getInput('token');
      let auth = !token || isGhes() ? undefined : `token ${token}`;
      let stable = (core.getInput('stable') || 'true').toUpperCase() === 'TRUE';
      const token = core.getInput('token');
      const auth = !token ? undefined : `token ${token}`;
      const stable =
        (core.getInput('stable') || 'true').toUpperCase() === 'TRUE';
      const checkLatest =
        (core.getInput('check-latest') || 'false').toUpperCase() === 'TRUE';
      await installer.getNode(version, stable, checkLatest, auth, arch);
      const nodejsInfo = {
        versionSpec: version,
        checkLatest,
        auth,
        stable,
        arch
      };
      const nodeDistribution = getNodejsDistribution(nodejsInfo);
      await nodeDistribution.setupNodeJs();
    }

    await printEnvDetailsAndSetOutput();
@ -92,48 +103,10 @@ function resolveVersionInput(): string {
      );
    }

    version = installer.parseNodeVersionFile(
      fs.readFileSync(versionFilePath, 'utf8')
    );
    version = parseNodeVersionFile(fs.readFileSync(versionFilePath, 'utf8'));

    core.info(`Resolved ${versionFileInput} as ${version}`);
  }

  return version;
}

export async function printEnvDetailsAndSetOutput() {
  core.startGroup('Environment details');

  const promises = ['node', 'npm', 'yarn'].map(async tool => {
    const output = await getToolVersion(tool, ['--version']);

    if (tool === 'node') {
      core.setOutput(`${tool}-version`, output);
    }

    core.info(`${tool}: ${output}`);
  });

  await Promise.all(promises);

  core.endGroup();
}

async function getToolVersion(tool: string, options: string[]) {
  try {
    const {stdout, stderr, exitCode} = await exec.getExecOutput(tool, options, {
      ignoreReturnCode: true,
      silent: true
    });

    if (exitCode > 0) {
      core.warning(`[warning]${stderr}`);
      return '';
    }

    return stdout;
  } catch (err) {
    return '';
  }
}
72
src/util.ts
Normal file
@ -0,0 +1,72 @@
import * as core from '@actions/core';
import * as exec from '@actions/exec';

export function parseNodeVersionFile(contents: string): string {
  let nodeVersion: string | undefined;

  // Try parsing the file as an NPM `package.json` file.
  try {
    nodeVersion = JSON.parse(contents).volta?.node;
    if (!nodeVersion) nodeVersion = JSON.parse(contents).engines?.node;
  } catch {
    core.info('Node version file is not JSON file');
  }

  if (!nodeVersion) {
    const found = contents.match(/^(?:node(js)?\s+)?v?(?<version>[^\s]+)$/m);
    nodeVersion = found?.groups?.version;
  }

  // In the case of an unknown format,
  // return as is and evaluate the version separately.
  if (!nodeVersion) nodeVersion = contents.trim();

  return nodeVersion as string;
}

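To make the accepted formats concrete, a few illustrative calls (the sample file contents are hypothetical):

// Hypothetical inputs and the value parseNodeVersionFile would return for each.
parseNodeVersionFile('{"volta": {"node": "16.18.1"}}'); // package.json volta pin => '16.18.1'
parseNodeVersionFile('{"engines": {"node": ">=16"}}'); // package.json engines range => '>=16'
parseNodeVersionFile('v18.12.1\n'); // .nvmrc / .node-version one-liner => '18.12.1'
parseNodeVersionFile('nodejs 16.18.1\n'); // .tool-versions style line => '16.18.1'
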
export async function printEnvDetailsAndSetOutput() {
  core.startGroup('Environment details');

  const promises = ['node', 'npm', 'yarn'].map(async tool => {
    const output = await getToolVersion(tool, ['--version']);

    return {tool, output};
  });

  const tools = await Promise.all(promises);
  tools.forEach(({tool, output}) => {
    if (tool === 'node') {
      core.setOutput(`${tool}-version`, output);
    }
    core.info(`${tool}: ${output}`);
  });

  core.endGroup();
}

async function getToolVersion(tool: string, options: string[]) {
  try {
    const {stdout, stderr, exitCode} = await exec.getExecOutput(tool, options, {
      ignoreReturnCode: true,
      silent: true
    });

    if (exitCode > 0) {
      core.info(`[warning]${stderr}`);
      return '';
    }

    return stdout.trim();
  } catch (err) {
    return '';
  }
}

export const unique = () => {
  const encountered = new Set();
  return (value: unknown): boolean => {
    if (encountered.has(value)) return false;
    encountered.add(value);
    return true;
  };
};
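A brief usage sketch: `unique()` returns a stateful predicate, so a typical (hypothetical) call site deduplicates an array with `Array.prototype.filter`:

import {unique} from './util';

// Each call to unique() creates a fresh Set, so the predicate is stateful per filter pass.
const cachePaths = ['~/.npm', '~/.yarn', '~/.npm'];
const deduped = cachePaths.filter(unique()); // => ['~/.npm', '~/.yarn']
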
tsconfig.json
@ -7,7 +7,8 @@
    "sourceMap": true,
    "strict": true, /* Enable all strict type-checking options. */
    "noImplicitAny": false, /* Raise error on expressions and declarations with an implied 'any' type. */
    "esModuleInterop": true /* Enables emit interoperability between CommonJS and ES Modules via creation of namespace objects for all imports. Implies 'allowSyntheticDefaultImports'. */
    "esModuleInterop": true, /* Enables emit interoperability between CommonJS and ES Modules via creation of namespace objects for all imports. Implies 'allowSyntheticDefaultImports'. */
    "resolveJsonModule": true, /* Allows importing modules with a '.json' extension, which is a common practice in node projects. */
  },
  "exclude": ["__tests__", "lib", "node_modules"]
}
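For illustration, the newly added `resolveJsonModule` flag is what lets TypeScript sources import JSON files directly; a minimal sketch with an assumed import path:

// With "resolveJsonModule": true, a .json file can be imported with type information.
// The path below is illustrative; any JSON file in the project works the same way.
import manifest from '../package.json';

console.log(`running ${manifest.name} version ${manifest.version}`);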