Mirror of https://github.com/actions/setup-node.git (synced 2025-07-01 20:53:12 +07:00)

Compare commits: 35 commits
Commit SHA1s:
bea5baf987
d82f92a0eb
ca2d4e0cdd
c7a93deeac
34050076a5
f8aa08ed8e
e2d34eacc8
ef9c88b169
ea800d4ebc
cb95c398f6
69b2dd252e
e33196f742
c6722d36aa
8170e22e8f
698d50532e
869f4dd0c7
10efafcbcf
7d16907b89
d0d39bda2f
15a2477e08
7598dbcd6e
a9893b0cfb
5b32c9063c
d98fa11138
9d255ef245
e828f9b7f3
a4fcaaf314
10f5623502
fcd18100cc
962678f22c
7c29869aec
ae9f0f7448
3dbcda8bc2
f38519bb96
9227cda3f0

.eslintignore (6 lines, Normal file)
@@ -0,0 +1,6 @@
# Ignore list
/*

# Do not ignore these folders:
!__tests__/
!src/

.eslintrc.js (51 lines, Normal file)
@@ -0,0 +1,51 @@
// This is a reusable configuration file copied from https://github.com/actions/reusable-workflows/tree/main/reusable-configurations. Please don't make changes to this file as it's the subject of an automatic update.
module.exports = {
  extends: [
    'eslint:recommended',
    'plugin:@typescript-eslint/recommended',
    'plugin:eslint-plugin-jest/recommended',
    'eslint-config-prettier'
  ],
  parser: '@typescript-eslint/parser',
  plugins: ['@typescript-eslint', 'eslint-plugin-node', 'eslint-plugin-jest'],
  rules: {
    '@typescript-eslint/no-require-imports': 'error',
    '@typescript-eslint/no-non-null-assertion': 'off',
    '@typescript-eslint/no-explicit-any': 'off',
    '@typescript-eslint/no-empty-function': 'off',
    '@typescript-eslint/ban-ts-comment': [
      'error',
      {
        'ts-ignore': 'allow-with-description'
      }
    ],
    'no-console': 'error',
    'yoda': 'error',
    'prefer-const': [
      'error',
      {
        destructuring: 'all'
      }
    ],
    'no-control-regex': 'off',
    'no-constant-condition': ['error', {checkLoops: false}],
    'node/no-extraneous-import': 'error'
  },
  overrides: [
    {
      files: ['**/*{test,spec}.ts'],
      rules: {
        '@typescript-eslint/no-unused-vars': 'off',
        'jest/no-standalone-expect': 'off',
        'jest/no-conditional-expect': 'off',
        'no-console': 'off',

      }
    }
  ],
  env: {
    node: true,
    es6: true,
    'jest/globals': true
  }
};

.gitattributes (1 line, vendored)
@@ -1 +1,2 @@
* text=auto eol=lf
.licenses/** -diff linguist-generated=true

.github/ISSUE_TEMPLATE/config.yml (2 lines, vendored)
@@ -1 +1 @@
blank_issues_enabled: false
blank_issues_enabled: false

.github/workflows/basic-validation.yml (2 lines, vendored)
@@ -14,4 +14,4 @@ on:
jobs:
  call-basic-validation:
    name: Basic validation
    uses: actions/reusable-workflows/.github/workflows/basic-validation.yml@main
    uses: actions/reusable-workflows/.github/workflows/basic-validation.yml@main

.github/workflows/check-dist.yml (4 lines, vendored)
@@ -1,4 +1,4 @@
name: Check dist/
name: Check dist

on:
  push:
@@ -14,4 +14,4 @@ on:
jobs:
  call-check-dist:
    name: Check dist/
    uses: actions/reusable-workflows/.github/workflows/check-dist.yml@main
    uses: actions/reusable-workflows/.github/workflows/check-dist.yml@main

.github/workflows/codeql-analysis.yml (8 lines, vendored)
@@ -2,13 +2,13 @@ name: CodeQL analysis

on:
  push:
    branches: [ main ]
    branches: [main]
  pull_request:
    branches: [ main ]
    branches: [main]
  schedule:
    - cron: '0 3 * * 0'

jobs:
  call-codeQL-analysis:
    name: CodeQL analysis
    uses: actions/reusable-workflows/.github/workflows/codeql-analysis.yml@main
    name: CodeQL analysis
    uses: actions/reusable-workflows/.github/workflows/codeql-analysis.yml@main

.github/workflows/e2e-cache.yml (109 lines, vendored)
@@ -134,3 +134,112 @@ jobs:
      - name: Verify node and yarn
        run: __tests__/verify-node.sh "${{ matrix.node-version }}"
        shell: bash

  yarn-subprojects:
    name: Test yarn subprojects
    strategy:
      matrix:
        node-version: [12, 14, 16]
    runs-on: ubuntu-latest

    steps:
      - uses: actions/checkout@v3

      - name: prepare sub-projects
        run: __tests__/prepare-yarn-subprojects.sh yarn1

      # expect
      # - no errors
      # - log
      # ##[debug]Cache Paths:
      # ##[debug]["sub2/.yarn/cache","sub3/.yarn/cache","../../../.cache/yarn/v6"]
      - name: Setup Node
        uses: ./
        with:
          node-version: ${{ matrix.node-version }}
          cache: 'yarn'
          cache-dependency-path: |
            **/*.lock
            yarn.lock

  yarn-subprojects-berry-local:
    name: Test yarn subprojects all locally managed
    strategy:
      matrix:
        node-version: [12, 14, 16]
    runs-on: ubuntu-latest

    steps:
      - uses: actions/checkout@v3

      - name: prepare sub-projects
        run: __tests__/prepare-yarn-subprojects.sh keepcache keepcache

      # expect
      # - no errors
      # - log
      # ##[info]All dependencies are managed locally by yarn3, the previous cache can be used
      # ##[debug]["node-cache-Linux-yarn-401024703386272f1a950c9f014cbb1bb79a7a5b6e1fb00e8b90d06734af41ee","node-cache-Linux-yarn"]
      - name: Setup Node
        uses: ./
        with:
          node-version: ${{ matrix.node-version }}
          cache: 'yarn'
          cache-dependency-path: |
            sub2/*.lock
            sub3/*.lock

  yarn-subprojects-berry-global:
    name: Test yarn subprojects some locally managed
    strategy:
      matrix:
        node-version: [12, 14, 16]
    runs-on: ubuntu-latest

    steps:
      - uses: actions/checkout@v3

      - name: prepare sub-projects
        run: __tests__/prepare-yarn-subprojects.sh global

      # expect
      # - no errors
      # - log must
      # ##[debug]"/home/runner/work/setup-node-test/setup-node-test/sub2" dependencies are managed by yarn 3 locally
      # ##[debug]"/home/runner/work/setup-node-test/setup-node-test/sub3" dependencies are not managed by yarn 3 locally
      - name: Setup Node
        uses: ./
        with:
          node-version: ${{ matrix.node-version }}
          cache: 'yarn'
          cache-dependency-path: |
            sub2/*.lock
            sub3/*.lock

  yarn-subprojects-berry-git:
    name: Test yarn subprojects managed by git
    strategy:
      matrix:
        node-version: [12, 14, 16]
    runs-on: ubuntu-latest

    steps:
      - uses: actions/checkout@v3

      - name: prepare sub-projects
        run: /bin/bash __tests__/prepare-yarn-subprojects.sh keepcache

      # expect
      # - no errors
      # - log
      # [debug]"/home/runner/work/setup-node-test/setup-node-test/sub2" has .yarn/cache - dependencies are kept in the repository
      # [debug]"/home/runner/work/setup-node-test/setup-node-test/sub3" has .yarn/cache - dependencies are kept in the repository
      # [debug]["node-cache-Linux-yarn-401024703386272f1a950c9f014cbb1bb79a7a5b6e1fb00e8b90d06734af41ee"]
      - name: Setup Node
        uses: ./
        with:
          node-version: ${{ matrix.node-version }}
          cache: 'yarn'
          cache-dependency-path: |
            sub2/*.lock
            sub3/*.lock

.github/workflows/release-new-action-version.yml (10 lines, vendored)
@@ -21,8 +21,8 @@ jobs:
    name: releaseNewActionVersion
    runs-on: ubuntu-latest
    steps:
      - name: Update the ${{ env.TAG_NAME }} tag
        uses: actions/publish-action@v0.2.1
        with:
          source-tag: ${{ env.TAG_NAME }}
          slack-webhook: ${{ secrets.SLACK_WEBHOOK }}
      - name: Update the ${{ env.TAG_NAME }} tag
        uses: actions/publish-action@v0.2.2
        with:
          source-tag: ${{ env.TAG_NAME }}
          slack-webhook: ${{ secrets.SLACK_WEBHOOK }}

.github/workflows/update-config-files.yml (11 lines, vendored, Normal file)
@@ -0,0 +1,11 @@
name: Update configuration files

on:
  schedule:
    - cron: '0 3 * * 0'
  workflow_dispatch:

jobs:
  call-update-configuration-files:
    name: Update configuration files
    uses: actions/reusable-workflows/.github/workflows/update-config-files.yml@main

.github/workflows/versions.yml (15 lines, vendored)
@@ -57,7 +57,12 @@ jobs:
      fail-fast: false
      matrix:
        os: [ubuntu-latest, windows-latest, macos-latest]
        node-version: ['20-v8-canary', '20.0.0-v8-canary','20.0.0-v8-canary20221103f7e2421e91']
        node-version:
          [
            '20-v8-canary',
            '20.0.0-v8-canary',
            '20.0.0-v8-canary20221101e50e45c9f8'
          ]
    steps:
      - uses: actions/checkout@v3
      - name: Setup Node
@@ -77,7 +82,8 @@ jobs:
      fail-fast: false
      matrix:
        os: [ubuntu-latest, windows-latest, macos-latest]
        node-version: [16.0.0-nightly20210420a0261d231c, 17-nightly, 18.0.0-nightly]
        node-version:
          [16.0.0-nightly20210420a0261d231c, 17-nightly, 18.0.0-nightly]
    steps:
      - uses: actions/checkout@v3
      - name: Setup Node
@@ -152,7 +158,8 @@ jobs:
      fail-fast: false
      matrix:
        os: [ubuntu-latest, windows-latest, macos-latest]
        node-version-file: [.nvmrc, .tool-versions, package.json]
        node-version-file:
          [.nvmrc, .tool-versions, .tool-versions-node, package.json]
    steps:
      - uses: actions/checkout@v3
      - name: Remove volta from package.json
@@ -170,7 +177,7 @@ jobs:
    strategy:
      fail-fast: false
      matrix:
        os: [ ubuntu-latest, windows-latest, macos-latest ]
        os: [ubuntu-latest, windows-latest, macos-latest]
    steps:
      - uses: actions/checkout@v3
      - name: Setup node from node version file

.licenses/npm/@actions/cache.dep.yml (2 lines, generated)
@@ -1,6 +1,6 @@
---
name: "@actions/cache"
version: 3.0.4
version: 3.2.1
type: npm
summary: Actions cache lib
homepage: https://github.com/actions/toolkit/tree/main/packages/cache

.licenses/npm/@azure/abort-controller.dep.yml (4 lines, generated)
@@ -1,9 +1,9 @@
---
name: "@azure/abort-controller"
version: 1.0.4
version: 1.1.0
type: npm
summary: Microsoft Azure SDK for JavaScript - Aborter
homepage: https://github.com/Azure/azure-sdk-for-js/tree/master/sdk/core/abort-controller/README.md
homepage: https://github.com/Azure/azure-sdk-for-js/tree/main/sdk/core/abort-controller/README.md
license: mit
licenses:
- sources: LICENSE

.licenses/npm/@azure/core-http.dep.yml (2 lines, generated)
@@ -1,6 +1,6 @@
---
name: "@azure/core-http"
version: 2.2.4
version: 3.0.1
type: npm
summary: Isomorphic client Runtime for Typescript/node.js/browser javascript client
  libraries generated using AutoRest

@@ -1,16 +1,16 @@
---
name: ip-regex
version: 2.1.0
name: "@azure/core-util"
version: 1.3.0
type: npm
summary: Regular expression for matching IP addresses (IPv4 & IPv6)
homepage: https://github.com/sindresorhus/ip-regex#readme
summary: Core library for shared utility methods
homepage: https://github.com/Azure/azure-sdk-for-js/blob/main/sdk/core/core-util/
license: mit
licenses:
- sources: license
- sources: LICENSE
  text: |
    The MIT License (MIT)

    Copyright (c) Sindre Sorhus <sindresorhus@gmail.com> (sindresorhus.com)
    Copyright (c) 2020 Microsoft

    Permission is hereby granted, free of charge, to any person obtaining a copy
    of this software and associated documentation files (the "Software"), to deal
@@ -19,16 +19,14 @@ licenses:
    copies of the Software, and to permit persons to whom the Software is
    furnished to do so, subject to the following conditions:

    The above copyright notice and this permission notice shall be included in
    all copies or substantial portions of the Software.
    The above copyright notice and this permission notice shall be included in all
    copies or substantial portions of the Software.

    THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
    IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
    FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
    AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
    LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
    OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
    THE SOFTWARE.
- sources: readme.md
  text: MIT © [Sindre Sorhus](https://sindresorhus.com)
    OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
    SOFTWARE.
notices: []

.licenses/npm/@azure/ms-rest-js.dep.yml (2 lines, generated)
@@ -1,6 +1,6 @@
---
name: "@azure/ms-rest-js"
version: 2.6.1
version: 2.7.0
type: npm
summary: Isomorphic client Runtime for Typescript/node.js/browser javascript client
  libraries generated using AutoRest

.licenses/npm/@azure/storage-blob.dep.yml (2 lines, generated)
@@ -1,6 +1,6 @@
---
name: "@azure/storage-blob"
version: 12.9.0
version: 12.13.0
type: npm
summary: Microsoft Azure Storage SDK for JavaScript - Blob
homepage: https://github.com/Azure/azure-sdk-for-js/blob/main/sdk/storage/storage-blob/

.licenses/npm/@types/node-fetch.dep.yml (2 lines, generated)
@@ -1,6 +1,6 @@
---
name: "@types/node-fetch"
version: 2.6.1
version: 2.6.3
type: npm
summary: TypeScript definitions for node-fetch
homepage: https://github.com/DefinitelyTyped/DefinitelyTyped/tree/master/types/node-fetch

.licenses/npm/psl.dep.yml (43 lines, generated)
@@ -1,43 +0,0 @@
---
name: psl
version: 1.8.0
type: npm
summary: Domain name parser based on the Public Suffix List
homepage: https://github.com/lupomontero/psl#readme
license: mit
licenses:
- sources: LICENSE
  text: |
    The MIT License (MIT)

    Copyright (c) 2017 Lupo Montero lupomontero@gmail.com

    Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions:

    The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software.

    THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
- sources: README.md
  text: |-
    The MIT License (MIT)

    Copyright (c) 2017 Lupo Montero <lupomontero@gmail.com>

    Permission is hereby granted, free of charge, to any person obtaining a copy
    of this software and associated documentation files (the "Software"), to deal
    in the Software without restriction, including without limitation the rights
    to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
    copies of the Software, and to permit persons to whom the Software is
    furnished to do so, subject to the following conditions:

    The above copyright notice and this permission notice shall be included in
    all copies or substantial portions of the Software.

    THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
    IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
    FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
    AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
    LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
    OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
    THE SOFTWARE.
notices: []

.licenses/npm/punycode.dep.yml (34 lines, generated)
@@ -1,34 +0,0 @@
---
name: punycode
version: 2.1.1
type: npm
summary: A robust Punycode converter that fully complies to RFC 3492 and RFC 5891,
  and works on nearly all JavaScript platforms.
homepage: https://mths.be/punycode
license: mit
licenses:
- sources: LICENSE-MIT.txt
  text: |
    Copyright Mathias Bynens <https://mathiasbynens.be/>

    Permission is hereby granted, free of charge, to any person obtaining
    a copy of this software and associated documentation files (the
    "Software"), to deal in the Software without restriction, including
    without limitation the rights to use, copy, modify, merge, publish,
    distribute, sublicense, and/or sell copies of the Software, and to
    permit persons to whom the Software is furnished to do so, subject to
    the following conditions:

    The above copyright notice and this permission notice shall be
    included in all copies or substantial portions of the Software.

    THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
    EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
    MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
    NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
    LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
    OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
    WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
- sources: README.md
  text: Punycode.js is available under the [MIT](https://mths.be/mit) license.
notices: []

@@ -1,9 +1,9 @@
---
name: semver
version: 6.1.2
version: 6.3.1
type: npm
summary: The semantic version parser used by npm.
homepage: https://github.com/npm/node-semver#readme
homepage:
license: isc
licenses:
- sources: LICENSE

.licenses/npm/tough-cookie-3.0.1.dep.yml (23 lines, generated)
@@ -1,23 +0,0 @@
---
name: tough-cookie
version: 3.0.1
type: npm
summary: RFC6265 Cookies and Cookie Jar for node.js
homepage: https://github.com/salesforce/tough-cookie
license: bsd-3-clause
licenses:
- sources: LICENSE
  text: |
    Copyright (c) 2015, Salesforce.com, Inc.
    All rights reserved.

    Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are met:

    1. Redistributions of source code must retain the above copyright notice, this list of conditions and the following disclaimer.

    2. Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following disclaimer in the documentation and/or other materials provided with the distribution.

    3. Neither the name of Salesforce.com nor the names of its contributors may be used to endorse or promote products derived from this software without specific prior written permission.

    THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
notices: []

.licenses/npm/tough-cookie-4.0.0.dep.yml (23 lines, generated)
@@ -1,23 +0,0 @@
---
name: tough-cookie
version: 4.0.0
type: npm
summary: RFC6265 Cookies and Cookie Jar for node.js
homepage: https://github.com/salesforce/tough-cookie
license: bsd-3-clause
licenses:
- sources: LICENSE
  text: |
    Copyright (c) 2015, Salesforce.com, Inc.
    All rights reserved.

    Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are met:

    1. Redistributions of source code must retain the above copyright notice, this list of conditions and the following disclaimer.

    2. Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following disclaimer in the documentation and/or other materials provided with the distribution.

    3. Neither the name of Salesforce.com nor the names of its contributors may be used to endorse or promote products derived from this software without specific prior written permission.

    THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
notices: []

.licenses/npm/tslib-2.5.0.dep.yml (35 lines, generated, Normal file)
@@ -0,0 +1,35 @@
---
name: tslib
version: 2.5.0
type: npm
summary: Runtime library for TypeScript helper functions
homepage: https://www.typescriptlang.org/
license: 0bsd
licenses:
- sources: LICENSE.txt
  text: |-
    Copyright (c) Microsoft Corporation.

    Permission to use, copy, modify, and/or distribute this software for any
    purpose with or without fee is hereby granted.

    THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES WITH
    REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY
    AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY SPECIAL, DIRECT,
    INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM
    LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR
    OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR
    PERFORMANCE OF THIS SOFTWARE.
notices:
- sources: CopyrightNotice.txt
  text: "/******************************************************************************\r\nCopyright
    (c) Microsoft Corporation.\r\n\r\nPermission to use, copy, modify, and/or distribute
    this software for any\r\npurpose with or without fee is hereby granted.\r\n\r\nTHE
    SOFTWARE IS PROVIDED \"AS IS\" AND THE AUTHOR DISCLAIMS ALL WARRANTIES WITH\r\nREGARD
    TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY\r\nAND FITNESS.
    IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY SPECIAL, DIRECT,\r\nINDIRECT, OR
    CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM\r\nLOSS OF USE,
    DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR\r\nOTHER TORTIOUS
    ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR\r\nPERFORMANCE OF THIS
    SOFTWARE.\r\n*****************************************************************************
    */"

.licenses/npm/universalify.dep.yml (33 lines, generated)
@@ -1,33 +0,0 @@
---
name: universalify
version: 0.1.2
type: npm
summary: Make a callback- or promise-based function support both promises and callbacks.
homepage: https://github.com/RyanZim/universalify#readme
license: mit
licenses:
- sources: LICENSE
  text: |
    (The MIT License)

    Copyright (c) 2017, Ryan Zimmerman <opensrc@ryanzim.com>

    Permission is hereby granted, free of charge, to any person obtaining a copy of
    this software and associated documentation files (the 'Software'), to deal in
    the Software without restriction, including without limitation the rights to
    use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of
    the Software, and to permit persons to whom the Software is furnished to do so,
    subject to the following conditions:

    The above copyright notice and this permission notice shall be included in all
    copies or substantial portions of the Software.

    THE SOFTWARE IS PROVIDED 'AS IS', WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
    IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS
    FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR
    COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER
    IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN
    CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
- sources: README.md
  text: MIT
notices: []

.licenses/npm/xml2js.dep.yml (2 lines, generated)
@@ -1,6 +1,6 @@
---
name: xml2js
version: 0.4.23
version: 0.5.0
type: npm
summary: Simple XML to JavaScript object converter.
homepage: https://github.com/Leonidas-from-XIV/node-xml2js

.prettierignore (7 lines, Normal file)
@@ -0,0 +1,7 @@
# Ignore list
/*

# Do not ignore these folders:
!__tests__/
!.github/
!src/

.prettierrc.js (11 lines, Normal file)
@@ -0,0 +1,11 @@
// This is a reusable configuration file copied from https://github.com/actions/reusable-workflows/tree/main/reusable-configurations. Please don't make changes to this file as it's the subject of an automatic update.
module.exports = {
  printWidth: 80,
  tabWidth: 2,
  useTabs: false,
  semi: true,
  singleQuote: true,
  trailingComma: 'none',
  bracketSpacing: false,
  arrowParens: 'avoid'
};

@@ -1,11 +0,0 @@
{
  "printWidth": 80,
  "tabWidth": 2,
  "useTabs": false,
  "semi": true,
  "singleQuote": true,
  "trailingComma": "none",
  "bracketSpacing": false,
  "arrowParens": "avoid",
  "parser": "typescript"
}

README.md (72 lines)
@@ -1,7 +1,8 @@
# setup-node

[](https://github.com/actions/setup-node/actions/workflows/build-test.yml)
[](https://github.com/actions/setup-node/actions/workflows/basic-validation.yml)
[](https://github.com/actions/setup-node/actions/workflows/versions.yml)
[](https://github.com/actions/setup-node/actions/workflows/e2e-cache.yml)
[](https://github.com/actions/setup-node/actions/workflows/proxy.yml)

This action provides the following functionality for GitHub Actions users:
@@ -15,6 +16,69 @@ This action provides the following functionality for GitHub Actions users:

See [action.yml](action.yml)

<!-- start usage -->
```yaml
- uses: actions/setup-node@v3
  with:
    # Version Spec of the version to use in SemVer notation.
    # It also emits such aliases as lts, latest, nightly and canary builds
    # Examples: 12.x, 10.15.1, >=10.15.0, lts/Hydrogen, 16-nightly, latest, node
    node-version: ''

    # File containing the version Spec of the version to use. Examples: .nvmrc, .node-version, .tool-versions.
    # If node-version and node-version-file are both provided the action will use version from node-version.
    node-version-file: ''

    # Set this option if you want the action to check for the latest available version
    # that satisfies the version spec.
    # It will only get affect for lts Nodejs versions (12.x, >=10.15.0, lts/Hydrogen).
    # Default: false
    check-latest: false

    # Target architecture for Node to use. Examples: x86, x64. Will use system architecture by default.
    # Default: ''. The action use system architecture by default
    architecture: ''

    # Used to pull node distributions from https://github.com/actions/node-versions.
    # Since there's a default, this is typically not supplied by the user.
    # When running this action on github.com, the default value is sufficient.
    # When running on GHES, you can pass a personal access token for github.com if you are experiencing rate limiting.
    #
    # We recommend using a service account with the least permissions necessary. Also
    # when generating a new PAT, select the least scopes necessary.
    #
    # [Learn more about creating and using encrypted secrets](https://help.github.com/en/actions/automating-your-workflow-with-github-actions/creating-and-using-encrypted-secrets)
    #
    # Default: ${{ github.server_url == 'https://github.com' && github.token || '' }}
    token: ''

    # Used to specify a package manager for caching in the default directory. Supported values: npm, yarn, pnpm.
    # Package manager should be pre-installed
    # Default: ''
    cache: ''

    # Used to specify the path to a dependency file: package-lock.json, yarn.lock, etc.
    # It will generate hash from the target file for primary key. It works only If cache is specified.
    # Supports wildcards or a list of file names for caching multiple dependencies.
    # Default: ''
    cache-dependency-path: ''

    # Optional registry to set up for auth. Will set the registry in a project level .npmrc and .yarnrc file,
    # and set up auth to read in from env.NODE_AUTH_TOKEN.
    # Default: ''
    registry-url: ''

    # Optional scope for authenticating against scoped registries.
    # Will fall back to the repository owner when using the GitHub Packages registry (https://npm.pkg.github.com/).
    # Default: ''
    scope: ''

    # Set always-auth option in npmrc file.
    # Default: ''
    always-auth: ''
```
<!-- end usage -->

**Basic:**

```yaml
@@ -22,7 +86,7 @@ steps:
  - uses: actions/checkout@v3
  - uses: actions/setup-node@v3
    with:
      node-version: 16
      node-version: 18
  - run: npm ci
  - run: npm test
```
@@ -35,7 +99,7 @@ For information regarding locally cached versions of Node.js on GitHub hosted ru

### Supported version syntax

The `node-version` input supports the Semantic Versioning Specification, for more detailed examples please refer to the [documentation](https://github.com/npm/node-semver).
The `node-version` input supports the Semantic Versioning Specification, for more detailed examples please refer to [the semver package documentation](https://github.com/npm/node-semver).

Examples:

@@ -132,7 +196,7 @@ If the runner is not able to access github.com, any Nodejs versions requested du
- [Using a node version file](docs/advanced-usage.md#node-version-file)
- [Using different architectures](docs/advanced-usage.md#architecture)
- [Using v8 canary versions](docs/advanced-usage.md#v8-canary-versions)
- [Using nigthly versions](docs/advanced-usage.md#nightly-versions)
- [Using nightly versions](docs/advanced-usage.md#nightly-versions)
- [Using rc versions](docs/advanced-usage.md#rc-versions)
- [Caching packages data](docs/advanced-usage.md#caching-packages-data)
- [Using multiple operating systems and architectures](docs/advanced-usage.md#multiple-operating-systems-and-architectures)

@@ -1,9 +1,10 @@
import os from 'os';
import * as fs from 'fs';
import fs from 'fs';
import * as path from 'path';
import * as core from '@actions/core';
import * as io from '@actions/io';
import * as auth from '../src/authutil';
import * as cacheUtils from '../src/cache-utils';

let rcFile: string;

@@ -15,11 +16,7 @@ describe('authutil tests', () => {
  let dbgSpy: jest.SpyInstance;

  beforeAll(async () => {
    const randPath = path.join(
      Math.random()
        .toString(36)
        .substring(7)
    );
    const randPath = path.join(Math.random().toString(36).substring(7));
    console.log('::stop-commands::stoptoken'); // Disable executing of runner commands when running tests in actions
    process.env['GITHUB_ENV'] = ''; // Stub out Environment file functionality so we can verify it writes to standard out (toolkit is backwards compatible)
    const tempDir = path.join(_runnerDir, randPath, 'temp');
@@ -67,10 +64,10 @@ describe('authutil tests', () => {
  }, 100000);

  function readRcFile(rcFile: string) {
    let rc = {};
    let contents = fs.readFileSync(rcFile, {encoding: 'utf8'});
    const rc = {};
    const contents = fs.readFileSync(rcFile, {encoding: 'utf8'});
    for (const line of contents.split(os.EOL)) {
      let parts = line.split('=');
      const parts = line.split('=');
      if (parts.length == 2) {
        rc[parts[0].trim()] = parts[1].trim();
      }
@@ -82,8 +79,8 @@ describe('authutil tests', () => {
    await auth.configAuthentication('https://registry.npmjs.org/', 'false');

    expect(fs.statSync(rcFile)).toBeDefined();
    let contents = fs.readFileSync(rcFile, {encoding: 'utf8'});
    let rc = readRcFile(rcFile);
    const contents = fs.readFileSync(rcFile, {encoding: 'utf8'});
    const rc = readRcFile(rcFile);
    expect(rc['registry']).toBe('https://registry.npmjs.org/');
    expect(rc['always-auth']).toBe('false');
  });
@@ -92,7 +89,7 @@ describe('authutil tests', () => {
    await auth.configAuthentication('https://registry.npmjs.org', 'false');

    expect(fs.statSync(rcFile)).toBeDefined();
    let rc = readRcFile(rcFile);
    const rc = readRcFile(rcFile);
    expect(rc['registry']).toBe('https://registry.npmjs.org/');
    expect(rc['always-auth']).toBe('false');
  });
@@ -102,7 +99,7 @@ describe('authutil tests', () => {
    await auth.configAuthentication('https://registry.npmjs.org', 'false');

    expect(fs.statSync(rcFile)).toBeDefined();
    let rc = readRcFile(rcFile);
    const rc = readRcFile(rcFile);
    expect(rc['@myscope:registry']).toBe('https://registry.npmjs.org/');
    expect(rc['always-auth']).toBe('false');
  });
@@ -111,7 +108,7 @@ describe('authutil tests', () => {
    await auth.configAuthentication('npm.pkg.github.com', 'false');

    expect(fs.statSync(rcFile)).toBeDefined();
    let rc = readRcFile(rcFile);
    const rc = readRcFile(rcFile);
    expect(rc['@ownername:registry']).toBe('npm.pkg.github.com/');
    expect(rc['always-auth']).toBe('false');
  });
@@ -119,16 +116,16 @@ describe('authutil tests', () => {
  it('Sets up npmrc for always-auth true', async () => {
    await auth.configAuthentication('https://registry.npmjs.org/', 'true');
    expect(fs.statSync(rcFile)).toBeDefined();
    let rc = readRcFile(rcFile);
    const rc = readRcFile(rcFile);
    expect(rc['registry']).toBe('https://registry.npmjs.org/');
    expect(rc['always-auth']).toBe('true');
  });

  it('It is already set the NODE_AUTH_TOKEN export it ', async () => {
  it('is already set the NODE_AUTH_TOKEN export it', async () => {
    process.env.NODE_AUTH_TOKEN = 'foobar';
    await auth.configAuthentication('npm.pkg.github.com', 'false');
    expect(fs.statSync(rcFile)).toBeDefined();
    let rc = readRcFile(rcFile);
    const rc = readRcFile(rcFile);
    expect(rc['@ownername:registry']).toBe('npm.pkg.github.com/');
    expect(rc['always-auth']).toBe('false');
    expect(process.env.NODE_AUTH_TOKEN).toEqual('foobar');
@@ -137,7 +134,7 @@ describe('authutil tests', () => {
  it('configAuthentication should overwrite non-scoped with non-scoped', async () => {
    fs.writeFileSync(rcFile, 'registry=NNN');
    await auth.configAuthentication('https://registry.npmjs.org/', 'true');
    let contents = fs.readFileSync(rcFile, {encoding: 'utf8'});
    const contents = fs.readFileSync(rcFile, {encoding: 'utf8'});
    expect(contents).toBe(
      `//registry.npmjs.org/:_authToken=\${NODE_AUTH_TOKEN}${os.EOL}registry=https://registry.npmjs.org/${os.EOL}always-auth=true`
    );
@@ -146,7 +143,7 @@ describe('authutil tests', () => {
  it('configAuthentication should overwrite only non-scoped', async () => {
    fs.writeFileSync(rcFile, `registry=NNN${os.EOL}@myscope:registry=MMM`);
    await auth.configAuthentication('https://registry.npmjs.org/', 'true');
    let contents = fs.readFileSync(rcFile, {encoding: 'utf8'});
    const contents = fs.readFileSync(rcFile, {encoding: 'utf8'});
    expect(contents).toBe(
      `@myscope:registry=MMM${os.EOL}//registry.npmjs.org/:_authToken=\${NODE_AUTH_TOKEN}${os.EOL}registry=https://registry.npmjs.org/${os.EOL}always-auth=true`
    );
@@ -155,7 +152,7 @@ describe('authutil tests', () => {
  it('configAuthentication should add non-scoped to scoped', async () => {
    fs.writeFileSync(rcFile, '@myscope:registry=NNN');
    await auth.configAuthentication('https://registry.npmjs.org/', 'true');
    let contents = fs.readFileSync(rcFile, {encoding: 'utf8'});
    const contents = fs.readFileSync(rcFile, {encoding: 'utf8'});
    expect(contents).toBe(
      `@myscope:registry=NNN${os.EOL}//registry.npmjs.org/:_authToken=\${NODE_AUTH_TOKEN}${os.EOL}registry=https://registry.npmjs.org/${os.EOL}always-auth=true`
    );
@@ -165,7 +162,7 @@ describe('authutil tests', () => {
    process.env['INPUT_SCOPE'] = 'myscope';
    fs.writeFileSync(rcFile, `@myscope:registry=NNN`);
    await auth.configAuthentication('https://registry.npmjs.org/', 'true');
    let contents = fs.readFileSync(rcFile, {encoding: 'utf8'});
    const contents = fs.readFileSync(rcFile, {encoding: 'utf8'});
    expect(contents).toBe(
      `//registry.npmjs.org/:_authToken=\${NODE_AUTH_TOKEN}${os.EOL}@myscope:registry=https://registry.npmjs.org/${os.EOL}always-auth=true`
    );
@@ -175,7 +172,7 @@ describe('authutil tests', () => {
    process.env['INPUT_SCOPE'] = 'myscope';
    fs.writeFileSync(rcFile, `registry=NNN${os.EOL}@myscope:registry=MMM`);
    await auth.configAuthentication('https://registry.npmjs.org/', 'true');
    let contents = fs.readFileSync(rcFile, {encoding: 'utf8'});
    const contents = fs.readFileSync(rcFile, {encoding: 'utf8'});
    expect(contents).toBe(
      `registry=NNN${os.EOL}//registry.npmjs.org/:_authToken=\${NODE_AUTH_TOKEN}${os.EOL}@myscope:registry=https://registry.npmjs.org/${os.EOL}always-auth=true`
    );
@@ -185,7 +182,7 @@ describe('authutil tests', () => {
    process.env['INPUT_SCOPE'] = 'myscope';
    fs.writeFileSync(rcFile, `registry=MMM`);
    await auth.configAuthentication('https://registry.npmjs.org/', 'true');
    let contents = fs.readFileSync(rcFile, {encoding: 'utf8'});
    const contents = fs.readFileSync(rcFile, {encoding: 'utf8'});
    expect(contents).toBe(
      `registry=MMM${os.EOL}//registry.npmjs.org/:_authToken=\${NODE_AUTH_TOKEN}${os.EOL}@myscope:registry=https://registry.npmjs.org/${os.EOL}always-auth=true`
    );
@@ -198,7 +195,7 @@ describe('authutil tests', () => {
      `@otherscope:registry=NNN${os.EOL}@myscope:registry=MMM`
    );
    await auth.configAuthentication('https://registry.npmjs.org/', 'true');
    let contents = fs.readFileSync(rcFile, {encoding: 'utf8'});
    const contents = fs.readFileSync(rcFile, {encoding: 'utf8'});
    expect(contents).toBe(
      `@otherscope:registry=NNN${os.EOL}//registry.npmjs.org/:_authToken=\${NODE_AUTH_TOKEN}${os.EOL}@myscope:registry=https://registry.npmjs.org/${os.EOL}always-auth=true`
    );
@@ -208,7 +205,7 @@ describe('authutil tests', () => {
    process.env['INPUT_SCOPE'] = 'myscope';
    fs.writeFileSync(rcFile, `@otherscope:registry=MMM`);
    await auth.configAuthentication('https://registry.npmjs.org/', 'true');
    let contents = fs.readFileSync(rcFile, {encoding: 'utf8'});
    const contents = fs.readFileSync(rcFile, {encoding: 'utf8'});
    expect(contents).toBe(
      `@otherscope:registry=MMM${os.EOL}//registry.npmjs.org/:_authToken=\${NODE_AUTH_TOKEN}${os.EOL}@myscope:registry=https://registry.npmjs.org/${os.EOL}always-auth=true`
    );

@@ -32,13 +32,13 @@ describe('cache-restore', () => {

  function findCacheFolder(command: string) {
    switch (command) {
      case utils.supportedPackageManagers.npm.getCacheFolderCommand:
      case 'npm config get cache':
        return npmCachePath;
      case utils.supportedPackageManagers.pnpm.getCacheFolderCommand:
      case 'pnpm store path --silent':
        return pnpmCachePath;
      case utils.supportedPackageManagers.yarn1.getCacheFolderCommand:
      case 'yarn cache dir':
        return yarn1CachePath;
      case utils.supportedPackageManagers.yarn2.getCacheFolderCommand:
      case 'yarn config get cacheFolder':
        return yarn2CachePath;
      default:
        return 'packge/not/found';
@@ -108,7 +108,7 @@ describe('cache-restore', () => {
  it.each([['npm7'], ['npm6'], ['pnpm6'], ['yarn1'], ['yarn2'], ['random']])(
    'Throw an error because %s is not supported',
    async packageManager => {
      await expect(restoreCache(packageManager)).rejects.toThrowError(
      await expect(restoreCache(packageManager, '')).rejects.toThrow(
        `Caching for '${packageManager}' is not supported`
      );
    }
@@ -132,7 +132,7 @@ describe('cache-restore', () => {
        }
      });

      await restoreCache(packageManager);
      await restoreCache(packageManager, '');
      expect(hashFilesSpy).toHaveBeenCalled();
      expect(infoSpy).toHaveBeenCalledWith(
        `Cache restored from key: node-cache-${platform}-${packageManager}-${fileHash}`
@@ -163,7 +163,7 @@ describe('cache-restore', () => {
      });

      restoreCacheSpy.mockImplementationOnce(() => undefined);
      await restoreCache(packageManager);
      await restoreCache(packageManager, '');
      expect(hashFilesSpy).toHaveBeenCalled();
      expect(infoSpy).toHaveBeenCalledWith(
        `${packageManager} cache is not found`

@@ -18,7 +18,7 @@ describe('run', () => {
  const commonPath = '/some/random/path';
  process.env['GITHUB_WORKSPACE'] = path.join(__dirname, 'data');

  let inputs = {} as any;
  const inputs = {} as any;

  let getInputSpy: jest.SpyInstance;
  let infoSpy: jest.SpyInstance;
@@ -92,6 +92,9 @@ describe('run', () => {

  it('Package manager is not valid, skip caching', async () => {
    inputs['cache'] = 'yarn3';
    getStateSpy.mockImplementation(key =>
      key === State.CachePackageManager ? inputs['cache'] : ''
    );

    await run();

@@ -107,18 +110,22 @@ describe('run', () => {
  describe('Validate unchanged cache is not saved', () => {
    it('should not save cache for yarn1', async () => {
      inputs['cache'] = 'yarn';
      getStateSpy.mockImplementation(() => yarnFileHash);
      getCommandOutputSpy
        .mockImplementationOnce(() => '1.2.3')
        .mockImplementationOnce(() => `${commonPath}/yarn1`);
      getStateSpy.mockImplementation(key =>
        key === State.CachePackageManager
          ? inputs['cache']
          : key === State.CachePrimaryKey || key === State.CacheMatchedKey
          ? yarnFileHash
          : key === State.CachePaths
          ? '["/foo/bar"]'
          : 'not expected'
      );

      await run();

      expect(getInputSpy).toHaveBeenCalled();
      expect(getStateSpy).toHaveBeenCalledTimes(2);
      expect(getCommandOutputSpy).toHaveBeenCalledTimes(2);
      expect(debugSpy).toHaveBeenCalledWith(`yarn path is ${commonPath}/yarn1`);
      expect(debugSpy).toHaveBeenCalledWith('Consumed yarn version is 1.2.3');
      expect(getInputSpy).not.toHaveBeenCalled();
      expect(getStateSpy).toHaveBeenCalledTimes(4);
      expect(getCommandOutputSpy).toHaveBeenCalledTimes(0);
      expect(debugSpy).toHaveBeenCalledTimes(0);
      expect(infoSpy).toHaveBeenCalledWith(
        `Cache hit occurred on the primary key ${yarnFileHash}, not saving cache.`
      );
@@ -127,18 +134,22 @@ describe('run', () => {

    it('should not save cache for yarn2', async () => {
      inputs['cache'] = 'yarn';
      getStateSpy.mockImplementation(() => yarnFileHash);
      getCommandOutputSpy
        .mockImplementationOnce(() => '2.2.3')
        .mockImplementationOnce(() => `${commonPath}/yarn2`);
      getStateSpy.mockImplementation(key =>
        key === State.CachePackageManager
          ? inputs['cache']
          : key === State.CachePrimaryKey || key === State.CacheMatchedKey
          ? yarnFileHash
          : key === State.CachePaths
          ? '["/foo/bar"]'
          : 'not expected'
      );

      await run();

      expect(getInputSpy).toHaveBeenCalled();
      expect(getStateSpy).toHaveBeenCalledTimes(2);
      expect(getCommandOutputSpy).toHaveBeenCalledTimes(2);
      expect(debugSpy).toHaveBeenCalledWith(`yarn path is ${commonPath}/yarn2`);
      expect(debugSpy).toHaveBeenCalledWith('Consumed yarn version is 2.2.3');
      expect(getInputSpy).not.toHaveBeenCalled();
      expect(getStateSpy).toHaveBeenCalledTimes(4);
      expect(getCommandOutputSpy).toHaveBeenCalledTimes(0);
      expect(debugSpy).toHaveBeenCalledTimes(0);
      expect(infoSpy).toHaveBeenCalledWith(
        `Cache hit occurred on the primary key ${yarnFileHash}, not saving cache.`
      );
@@ -147,35 +158,44 @@ describe('run', () => {

    it('should not save cache for npm', async () => {
      inputs['cache'] = 'npm';
      getStateSpy.mockImplementation(() => npmFileHash);
      getStateSpy.mockImplementation(key =>
        key === State.CachePackageManager
          ? inputs['cache']
          : key === State.CachePrimaryKey || key === State.CacheMatchedKey
          ? yarnFileHash
          : key === State.CachePaths
          ? '["/foo/bar"]'
          : 'not expected'
      );
      getCommandOutputSpy.mockImplementationOnce(() => `${commonPath}/npm`);

      await run();

      expect(getInputSpy).toHaveBeenCalled();
      expect(getStateSpy).toHaveBeenCalledTimes(2);
      expect(getCommandOutputSpy).toHaveBeenCalledTimes(1);
      expect(debugSpy).toHaveBeenCalledWith(`npm path is ${commonPath}/npm`);
      expect(infoSpy).toHaveBeenCalledWith(
        `Cache hit occurred on the primary key ${npmFileHash}, not saving cache.`
      );
      expect(getInputSpy).not.toHaveBeenCalled();
      expect(getStateSpy).toHaveBeenCalledTimes(4);
      expect(getCommandOutputSpy).toHaveBeenCalledTimes(0);
      expect(debugSpy).toHaveBeenCalledTimes(0);
      expect(setFailedSpy).not.toHaveBeenCalled();
    });

    it('should not save cache for pnpm', async () => {
      inputs['cache'] = 'pnpm';
      getStateSpy.mockImplementation(() => pnpmFileHash);
      getCommandOutputSpy.mockImplementationOnce(() => `${commonPath}/pnpm`);
      getStateSpy.mockImplementation(key =>
        key === State.CachePackageManager
          ? inputs['cache']
          : key === State.CachePrimaryKey || key === State.CacheMatchedKey
          ? yarnFileHash
          : key === State.CachePaths
          ? '["/foo/bar"]'
          : 'not expected'
      );

      await run();

      expect(getInputSpy).toHaveBeenCalled();
      expect(getStateSpy).toHaveBeenCalledTimes(2);
      expect(getCommandOutputSpy).toHaveBeenCalledTimes(1);
      expect(debugSpy).toHaveBeenCalledWith(`pnpm path is ${commonPath}/pnpm`);
      expect(infoSpy).toHaveBeenCalledWith(
        `Cache hit occurred on the primary key ${pnpmFileHash}, not saving cache.`
      );
      expect(getInputSpy).not.toHaveBeenCalled();
      expect(getStateSpy).toHaveBeenCalledTimes(4);
      expect(getCommandOutputSpy).toHaveBeenCalledTimes(0);
      expect(debugSpy).toHaveBeenCalledTimes(0);
      expect(setFailedSpy).not.toHaveBeenCalled();
    });
  });
@@ -183,24 +203,24 @@ describe('run', () => {
  describe('action saves the cache', () => {
    it('saves cache from yarn 1', async () => {
      inputs['cache'] = 'yarn';
      getStateSpy.mockImplementation((name: string) => {
        if (name === State.CacheMatchedKey) {
          return yarnFileHash;
        } else {
          return npmFileHash;
        }
      });
      getCommandOutputSpy
        .mockImplementationOnce(() => '1.2.3')
        .mockImplementationOnce(() => `${commonPath}/yarn1`);
      getStateSpy.mockImplementation((key: string) =>
        key === State.CachePackageManager
          ? inputs['cache']
          : key === State.CacheMatchedKey
          ? yarnFileHash
          : key === State.CachePrimaryKey
          ? npmFileHash
          : key === State.CachePaths
          ? '["/foo/bar"]'
          : 'not expected'
      );

      await run();

      expect(getInputSpy).toHaveBeenCalled();
      expect(getStateSpy).toHaveBeenCalledTimes(2);
      expect(getCommandOutputSpy).toHaveBeenCalledTimes(2);
      expect(debugSpy).toHaveBeenCalledWith(`yarn path is ${commonPath}/yarn1`);
      expect(debugSpy).toHaveBeenCalledWith('Consumed yarn version is 1.2.3');
      expect(getInputSpy).not.toHaveBeenCalled();
      expect(getStateSpy).toHaveBeenCalledTimes(4);
      expect(getCommandOutputSpy).toHaveBeenCalledTimes(0);
      expect(debugSpy).toHaveBeenCalledTimes(0);
      expect(infoSpy).not.toHaveBeenCalledWith(
        `Cache hit occurred on the primary key ${yarnFileHash}, not saving cache.`
      );
@@ -213,24 +233,24 @@ describe('run', () => {

    it('saves cache from yarn 2', async () => {
      inputs['cache'] = 'yarn';
      getStateSpy.mockImplementation((name: string) => {
        if (name === State.CacheMatchedKey) {
          return yarnFileHash;
        } else {
          return npmFileHash;
        }
      });
      getCommandOutputSpy
        .mockImplementationOnce(() => '2.2.3')
        .mockImplementationOnce(() => `${commonPath}/yarn2`);
      getStateSpy.mockImplementation((key: string) =>
        key === State.CachePackageManager
          ? inputs['cache']
          : key === State.CacheMatchedKey
          ? yarnFileHash
          : key === State.CachePrimaryKey
          ? npmFileHash
          : key === State.CachePaths
          ? '["/foo/bar"]'
          : 'not expected'
      );

      await run();

      expect(getInputSpy).toHaveBeenCalled();
      expect(getStateSpy).toHaveBeenCalledTimes(2);
      expect(getCommandOutputSpy).toHaveBeenCalledTimes(2);
      expect(debugSpy).toHaveBeenCalledWith(`yarn path is ${commonPath}/yarn2`);
      expect(debugSpy).toHaveBeenCalledWith('Consumed yarn version is 2.2.3');
      expect(getInputSpy).not.toHaveBeenCalled();
      expect(getStateSpy).toHaveBeenCalledTimes(4);
      expect(getCommandOutputSpy).toHaveBeenCalledTimes(0);
      expect(debugSpy).toHaveBeenCalledTimes(0);
      expect(infoSpy).not.toHaveBeenCalledWith(
        `Cache hit occurred on the primary key ${yarnFileHash}, not saving cache.`
      );
@@ -243,21 +263,24 @@ describe('run', () => {

    it('saves cache from npm', async () => {
      inputs['cache'] = 'npm';
      getStateSpy.mockImplementation((name: string) => {
        if (name === State.CacheMatchedKey) {
          return npmFileHash;
        } else {
          return yarnFileHash;
        }
      });
      getCommandOutputSpy.mockImplementationOnce(() => `${commonPath}/npm`);
      getStateSpy.mockImplementation((key: string) =>
        key === State.CachePackageManager
          ? inputs['cache']
          : key === State.CacheMatchedKey
          ? npmFileHash
          : key === State.CachePrimaryKey
          ? yarnFileHash
          : key === State.CachePaths
          ? '["/foo/bar"]'
          : 'not expected'
      );

      await run();

      expect(getInputSpy).toHaveBeenCalled();
      expect(getStateSpy).toHaveBeenCalledTimes(2);
      expect(getCommandOutputSpy).toHaveBeenCalledTimes(1);
      expect(debugSpy).toHaveBeenCalledWith(`npm path is ${commonPath}/npm`);
      expect(getInputSpy).not.toHaveBeenCalled();
      expect(getStateSpy).toHaveBeenCalledTimes(4);
      expect(getCommandOutputSpy).toHaveBeenCalledTimes(0);
      expect(debugSpy).toHaveBeenCalledTimes(0);
      expect(infoSpy).not.toHaveBeenCalledWith(
        `Cache hit occurred on the primary key ${npmFileHash}, not saving cache.`
      );
@@ -270,21 +293,24 @@ describe('run', () => {

    it('saves cache from pnpm', async () => {
      inputs['cache'] = 'pnpm';
      getStateSpy.mockImplementation((name: string) => {
        if (name === State.CacheMatchedKey) {
          return pnpmFileHash;
        } else {
          return npmFileHash;
        }
      });
      getCommandOutputSpy.mockImplementationOnce(() => `${commonPath}/pnpm`);
      getStateSpy.mockImplementation((key: string) =>
        key === State.CachePackageManager
          ? inputs['cache']
          : key === State.CacheMatchedKey
          ? pnpmFileHash
          : key === State.CachePrimaryKey
          ? npmFileHash
          : key === State.CachePaths
          ? '["/foo/bar"]'
          : 'not expected'
      );

      await run();

      expect(getInputSpy).toHaveBeenCalled();
      expect(getStateSpy).toHaveBeenCalledTimes(2);
      expect(getCommandOutputSpy).toHaveBeenCalledTimes(1);
      expect(debugSpy).toHaveBeenCalledWith(`pnpm path is ${commonPath}/pnpm`);
      expect(getInputSpy).not.toHaveBeenCalled();
      expect(getStateSpy).toHaveBeenCalledTimes(4);
      expect(getCommandOutputSpy).toHaveBeenCalledTimes(0);
      expect(debugSpy).toHaveBeenCalledTimes(0);
      expect(infoSpy).not.toHaveBeenCalledWith(
        `Cache hit occurred on the primary key ${pnpmFileHash}, not saving cache.`
      );
@@ -297,24 +323,27 @@ describe('run', () => {

    it('save with -1 cacheId , should not fail workflow', async () => {
      inputs['cache'] = 'npm';
      getStateSpy.mockImplementation((name: string) => {
        if (name === State.CacheMatchedKey) {
          return npmFileHash;
        } else {
          return yarnFileHash;
        }
      });
      getCommandOutputSpy.mockImplementationOnce(() => `${commonPath}/npm`);
      getStateSpy.mockImplementation((key: string) =>
        key === State.CachePackageManager
          ? inputs['cache']
          : key === State.CacheMatchedKey
          ? npmFileHash
          : key === State.CachePrimaryKey
          ? yarnFileHash
          : key === State.CachePaths
          ? '["/foo/bar"]'
          : 'not expected'
      );
      saveCacheSpy.mockImplementation(() => {
        return -1;
      });

      await run();

      expect(getInputSpy).toHaveBeenCalled();
      expect(getStateSpy).toHaveBeenCalledTimes(2);
      expect(getCommandOutputSpy).toHaveBeenCalledTimes(1);
      expect(debugSpy).toHaveBeenCalledWith(`npm path is ${commonPath}/npm`);
      expect(getInputSpy).not.toHaveBeenCalled();
      expect(getStateSpy).toHaveBeenCalledTimes(4);
      expect(getCommandOutputSpy).toHaveBeenCalledTimes(0);
      expect(debugSpy).toHaveBeenCalledTimes(0);
      expect(infoSpy).not.toHaveBeenCalledWith(
        `Cache hit occurred on the primary key ${npmFileHash}, not saving cache.`
      );
@@ -327,24 +356,27 @@ describe('run', () => {

    it('saves with error from toolkit, should fail workflow', async () => {
      inputs['cache'] = 'npm';
      getStateSpy.mockImplementation((name: string) => {
        if (name === State.CacheMatchedKey) {
          return npmFileHash;
        } else {
          return yarnFileHash;
        }
      });
      getCommandOutputSpy.mockImplementationOnce(() => `${commonPath}/npm`);
      getStateSpy.mockImplementation((key: string) =>
        key === State.CachePackageManager
          ? inputs['cache']
          : key === State.CacheMatchedKey
          ? npmFileHash
          : key === State.CachePrimaryKey
          ? yarnFileHash
          : key === State.CachePaths
          ? '["/foo/bar"]'
          : 'not expected'
      );
      saveCacheSpy.mockImplementation(() => {
        throw new cache.ValidationError('Validation failed');
      });

      await run();

      expect(getInputSpy).toHaveBeenCalled();
      expect(getStateSpy).toHaveBeenCalledTimes(2);
      expect(getCommandOutputSpy).toHaveBeenCalledTimes(1);
      expect(debugSpy).toHaveBeenCalledWith(`npm path is ${commonPath}/npm`);
      expect(getInputSpy).not.toHaveBeenCalled();
      expect(getStateSpy).toHaveBeenCalledTimes(4);
      expect(getCommandOutputSpy).toHaveBeenCalledTimes(0);
      expect(debugSpy).toHaveBeenCalledTimes(0);
      expect(infoSpy).not.toHaveBeenCalledWith(
        `Cache hit occurred on the primary key ${npmFileHash}, not saving cache.`
      );

|
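The cache-save tests above now stub core.getState with a key-based ternary over four State entries. A minimal equivalent written as a lookup table (illustrative only; it reuses the fixtures and spies named in the tests and assumes the State members are string-valued):

    // Same behaviour as the ternary: return a fixture per State key, 'not expected' otherwise.
    // State.CachePaths holds a JSON-encoded array of directories, which the save step
    // presumably parses back with JSON.parse before calling cache.saveCache.
    const stateFixture: Record<string, string> = {
      [State.CachePackageManager]: inputs['cache'],
      [State.CacheMatchedKey]: npmFileHash,
      [State.CachePrimaryKey]: yarnFileHash,
      [State.CachePaths]: '["/foo/bar"]'
    };
    getStateSpy.mockImplementation((key: string) => stateFixture[key] ?? 'not expected');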
@ -2,7 +2,18 @@ import * as core from '@actions/core';
import * as cache from '@actions/cache';
import path from 'path';
import * as utils from '../src/cache-utils';
import {PackageManagerInfo, isCacheFeatureAvailable} from '../src/cache-utils';
import {
  PackageManagerInfo,
  isCacheFeatureAvailable,
  supportedPackageManagers,
  getCommandOutput,
  resetProjectDirectoriesMemoized
} from '../src/cache-utils';
import fs from 'fs';
import * as cacheUtils from '../src/cache-utils';
import * as glob from '@actions/glob';
import {Globber} from '@actions/glob';
import {MockGlobber} from './mock/glob-mock';

describe('cache-utils', () => {
  const versionYarn1 = '1.2.3';
@ -12,8 +23,10 @@ describe('cache-utils', () => {
  let isFeatureAvailable: jest.SpyInstance;
  let info: jest.SpyInstance;
  let warningSpy: jest.SpyInstance;
  let fsRealPathSyncSpy: jest.SpyInstance;

  beforeEach(() => {
    console.log('::stop-commands::stoptoken');
    process.env['GITHUB_WORKSPACE'] = path.join(__dirname, 'data');
    debugSpy = jest.spyOn(core, 'debug');
    debugSpy.mockImplementation(msg => {});
@ -24,13 +37,29 @@ describe('cache-utils', () => {
    isFeatureAvailable = jest.spyOn(cache, 'isFeatureAvailable');

    getCommandOutputSpy = jest.spyOn(utils, 'getCommandOutput');

    fsRealPathSyncSpy = jest.spyOn(fs, 'realpathSync');
    fsRealPathSyncSpy.mockImplementation(dirName => {
      return dirName;
    });
  });

  afterEach(() => {
    jest.resetAllMocks();
    jest.clearAllMocks();
    //jest.restoreAllMocks();
  });

  afterAll(async () => {
    console.log('::stoptoken::');
    jest.restoreAllMocks();
  }, 100000);

  describe('getPackageManagerInfo', () => {
    it.each<[string, PackageManagerInfo | null]>([
      ['npm', utils.supportedPackageManagers.npm],
      ['pnpm', utils.supportedPackageManagers.pnpm],
      ['yarn', utils.supportedPackageManagers.yarn1],
      ['yarn', utils.supportedPackageManagers.yarn],
      ['yarn1', null],
      ['yarn2', null],
      ['npm7', null]
@ -72,4 +101,263 @@ describe('cache-utils', () => {
      jest.resetAllMocks();
      jest.clearAllMocks();
    });
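The new resetProjectDirectoriesMemoized import, together with the call to it in the beforeEach below, suggests that cache-utils memoizes the resolved project directories and that the tests clear that memo between cases. A rough sketch of the memoize-plus-reset pattern (illustrative names only, not the repository's actual implementation):

    // Cache the first computation; tests call resetMemo() so each case recomputes from scratch.
    let memoizedDirs: Promise<string[]> | null = null;

    export function resetMemo(): void {
      memoizedDirs = null;
    }

    export function getMemoizedDirs(compute: () => Promise<string[]>): Promise<string[]> {
      if (memoizedDirs === null) {
        memoizedDirs = compute();
      }
      return memoizedDirs;
    }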
describe('getCacheDirectoriesPaths', () => {
|
||||
let existsSpy: jest.SpyInstance;
|
||||
let lstatSpy: jest.SpyInstance;
|
||||
let globCreateSpy: jest.SpyInstance;
|
||||
|
||||
beforeEach(() => {
|
||||
existsSpy = jest.spyOn(fs, 'existsSync');
|
||||
existsSpy.mockImplementation(() => true);
|
||||
|
||||
lstatSpy = jest.spyOn(fs, 'lstatSync');
|
||||
lstatSpy.mockImplementation(arg => ({
|
||||
isDirectory: () => true
|
||||
}));
|
||||
|
||||
globCreateSpy = jest.spyOn(glob, 'create');
|
||||
|
||||
globCreateSpy.mockImplementation(
|
||||
(pattern: string): Promise<Globber> =>
|
||||
MockGlobber.create(['/foo', '/bar'])
|
||||
);
|
||||
|
||||
resetProjectDirectoriesMemoized();
|
||||
});
|
||||
|
||||
afterEach(() => {
|
||||
existsSpy.mockRestore();
|
||||
lstatSpy.mockRestore();
|
||||
globCreateSpy.mockRestore();
|
||||
});
|
||||
|
||||
it.each([
|
||||
[supportedPackageManagers.npm, ''],
|
||||
[supportedPackageManagers.npm, '/dir/file.lock'],
|
||||
[supportedPackageManagers.npm, '/**/file.lock'],
|
||||
[supportedPackageManagers.pnpm, ''],
|
||||
[supportedPackageManagers.pnpm, '/dir/file.lock'],
|
||||
[supportedPackageManagers.pnpm, '/**/file.lock']
|
||||
])(
|
||||
'getCacheDirectoriesPaths should return one dir for non yarn',
|
||||
async (packageManagerInfo, cacheDependency) => {
|
||||
getCommandOutputSpy.mockImplementation(() => 'foo');
|
||||
|
||||
const dirs = await cacheUtils.getCacheDirectories(
|
||||
packageManagerInfo,
|
||||
cacheDependency
|
||||
);
|
||||
expect(dirs).toEqual(['foo']);
|
||||
// to do not call for a version
|
||||
// call once for get cache folder
|
||||
expect(getCommandOutputSpy).toHaveBeenCalledTimes(1);
|
||||
}
|
||||
);
|
||||
|
||||
it('getCacheDirectoriesPaths should return one dir for yarn without cacheDependency', async () => {
|
||||
getCommandOutputSpy.mockImplementation(() => 'foo');
|
||||
|
||||
const dirs = await cacheUtils.getCacheDirectories(
|
||||
supportedPackageManagers.yarn,
|
||||
''
|
||||
);
|
||||
expect(dirs).toEqual(['foo']);
|
||||
});
|
||||
|
||||
it.each([
|
||||
[supportedPackageManagers.npm, ''],
|
||||
[supportedPackageManagers.npm, '/dir/file.lock'],
|
||||
[supportedPackageManagers.npm, '/**/file.lock'],
|
||||
[supportedPackageManagers.pnpm, ''],
|
||||
[supportedPackageManagers.pnpm, '/dir/file.lock'],
|
||||
[supportedPackageManagers.pnpm, '/**/file.lock'],
|
||||
[supportedPackageManagers.yarn, ''],
|
||||
[supportedPackageManagers.yarn, '/dir/file.lock'],
|
||||
[supportedPackageManagers.yarn, '/**/file.lock']
|
||||
])(
|
||||
'getCacheDirectoriesPaths should throw for getCommandOutput returning empty',
|
||||
async (packageManagerInfo, cacheDependency) => {
|
||||
getCommandOutputSpy.mockImplementation((command: string) =>
|
||||
// return empty string to indicate getCacheFolderPath failed
|
||||
// --version still works
|
||||
command.includes('version') ? '1.' : ''
|
||||
);
|
||||
|
||||
await expect(
|
||||
cacheUtils.getCacheDirectories(packageManagerInfo, cacheDependency)
|
||||
).rejects.toThrow(); //'Could not get cache folder path for /dir');
|
||||
}
|
||||
);
|
||||
|
||||
it.each([
|
||||
[supportedPackageManagers.yarn, '/dir/file.lock'],
|
||||
[supportedPackageManagers.yarn, '/**/file.lock']
|
||||
])(
|
||||
'getCacheDirectoriesPaths should nothrow in case of having not directories',
|
||||
async (packageManagerInfo, cacheDependency) => {
|
||||
lstatSpy.mockImplementation(arg => ({
|
||||
isDirectory: () => false
|
||||
}));
|
||||
|
||||
await cacheUtils.getCacheDirectories(
|
||||
packageManagerInfo,
|
||||
cacheDependency
|
||||
);
|
||||
expect(warningSpy).toHaveBeenCalledTimes(1);
|
||||
expect(warningSpy).toHaveBeenCalledWith(
|
||||
`No existing directories found containing cache-dependency-path="${cacheDependency}"`
|
||||
);
|
||||
}
|
||||
);
|
||||
|
||||
it.each(['1.1.1', '2.2.2'])(
|
||||
'getCacheDirectoriesPaths yarn v%s should return one dir without cacheDependency',
|
||||
async version => {
|
||||
getCommandOutputSpy.mockImplementationOnce(() => version);
|
||||
getCommandOutputSpy.mockImplementationOnce(() => `foo${version}`);
|
||||
|
||||
const dirs = await cacheUtils.getCacheDirectories(
|
||||
supportedPackageManagers.yarn,
|
||||
''
|
||||
);
|
||||
expect(dirs).toEqual([`foo${version}`]);
|
||||
}
|
||||
);
|
||||
|
||||
it.each(['1.1.1', '2.2.2'])(
|
||||
'getCacheDirectoriesPaths yarn v%s should return 2 dirs with globbed cacheDependency',
|
||||
async version => {
|
||||
let dirNo = 1;
|
||||
getCommandOutputSpy.mockImplementation((command: string) =>
|
||||
command.includes('version') ? version : `file_${version}_${dirNo++}`
|
||||
);
|
||||
globCreateSpy.mockImplementation(
|
||||
(pattern: string): Promise<Globber> =>
|
||||
MockGlobber.create(['/tmp/dir1/file', '/tmp/dir2/file'])
|
||||
);
|
||||
|
||||
const dirs = await cacheUtils.getCacheDirectories(
|
||||
supportedPackageManagers.yarn,
|
||||
'/tmp/**/file'
|
||||
);
|
||||
expect(dirs).toEqual([`file_${version}_1`, `file_${version}_2`]);
|
||||
}
|
||||
);
|
||||
|
||||
it.each(['1.1.1', '2.2.2'])(
|
||||
'getCacheDirectoriesPaths yarn v%s should return 2 dirs with globbed cacheDependency expanding to duplicates',
|
||||
async version => {
|
||||
let dirNo = 1;
|
||||
getCommandOutputSpy.mockImplementation((command: string) =>
|
||||
command.includes('version') ? version : `file_${version}_${dirNo++}`
|
||||
);
|
||||
globCreateSpy.mockImplementation(
|
||||
(pattern: string): Promise<Globber> =>
|
||||
MockGlobber.create([
|
||||
'/tmp/dir1/file',
|
||||
'/tmp/dir2/file',
|
||||
'/tmp/dir1/file'
|
||||
])
|
||||
);
|
||||
|
||||
const dirs = await cacheUtils.getCacheDirectories(
|
||||
supportedPackageManagers.yarn,
|
||||
'/tmp/**/file'
|
||||
);
|
||||
expect(dirs).toEqual([`file_${version}_1`, `file_${version}_2`]);
|
||||
}
|
||||
);
|
||||
|
||||
it.each(['1.1.1', '2.2.2'])(
|
||||
'getCacheDirectoriesPaths yarn v%s should return 2 uniq dirs despite duplicate cache directories',
|
||||
async version => {
|
||||
let dirNo = 1;
|
||||
getCommandOutputSpy.mockImplementation((command: string) =>
|
||||
command.includes('version')
|
||||
? version
|
||||
: `file_${version}_${dirNo++ % 2}`
|
||||
);
|
||||
globCreateSpy.mockImplementation(
|
||||
(pattern: string): Promise<Globber> =>
|
||||
MockGlobber.create([
|
||||
'/tmp/dir1/file',
|
||||
'/tmp/dir2/file',
|
||||
'/tmp/dir3/file'
|
||||
])
|
||||
);
|
||||
|
||||
const dirs = await cacheUtils.getCacheDirectories(
|
||||
supportedPackageManagers.yarn,
|
||||
'/tmp/**/file'
|
||||
);
|
||||
expect(dirs).toEqual([`file_${version}_1`, `file_${version}_0`]);
|
||||
expect(getCommandOutputSpy).toHaveBeenCalledTimes(6);
|
||||
expect(getCommandOutputSpy).toHaveBeenCalledWith(
|
||||
'yarn --version',
|
||||
'/tmp/dir1'
|
||||
);
|
||||
expect(getCommandOutputSpy).toHaveBeenCalledWith(
|
||||
'yarn --version',
|
||||
'/tmp/dir2'
|
||||
);
|
||||
expect(getCommandOutputSpy).toHaveBeenCalledWith(
|
||||
'yarn --version',
|
||||
'/tmp/dir3'
|
||||
);
|
||||
expect(getCommandOutputSpy).toHaveBeenCalledWith(
|
||||
version.startsWith('1.')
|
||||
? 'yarn cache dir'
|
||||
: 'yarn config get cacheFolder',
|
||||
'/tmp/dir1'
|
||||
);
|
||||
expect(getCommandOutputSpy).toHaveBeenCalledWith(
|
||||
version.startsWith('1.')
|
||||
? 'yarn cache dir'
|
||||
: 'yarn config get cacheFolder',
|
||||
'/tmp/dir2'
|
||||
);
|
||||
expect(getCommandOutputSpy).toHaveBeenCalledWith(
|
||||
version.startsWith('1.')
|
||||
? 'yarn cache dir'
|
||||
: 'yarn config get cacheFolder',
|
||||
'/tmp/dir3'
|
||||
);
|
||||
}
|
||||
);
|
||||
|
||||
it.each(['1.1.1', '2.2.2'])(
|
||||
'getCacheDirectoriesPaths yarn v%s should return 4 dirs with multiple globs',
|
||||
async version => {
|
||||
// simulate wrong indents
|
||||
const cacheDependencyPath = `/tmp/dir1/file
|
||||
/tmp/dir2/file
|
||||
/tmp/**/file
|
||||
`;
|
||||
globCreateSpy.mockImplementation(
|
||||
(pattern: string): Promise<Globber> =>
|
||||
MockGlobber.create([
|
||||
'/tmp/dir1/file',
|
||||
'/tmp/dir2/file',
|
||||
'/tmp/dir3/file',
|
||||
'/tmp/dir4/file'
|
||||
])
|
||||
);
|
||||
let dirNo = 1;
|
||||
getCommandOutputSpy.mockImplementation((command: string) =>
|
||||
command.includes('version') ? version : `file_${version}_${dirNo++}`
|
||||
);
|
||||
const dirs = await cacheUtils.getCacheDirectories(
|
||||
supportedPackageManagers.yarn,
|
||||
cacheDependencyPath
|
||||
);
|
||||
expect(dirs).toEqual([
|
||||
`file_${version}_1`,
|
||||
`file_${version}_2`,
|
||||
`file_${version}_3`,
|
||||
`file_${version}_4`
|
||||
]);
|
||||
}
|
||||
);
|
||||
});
|
||||
});
|
||||
|
@ -12,11 +12,11 @@ import * as main from '../src/main';
|
||||
import * as auth from '../src/authutil';
|
||||
import {INodeVersion} from '../src/distributions/base-models';
|
||||
|
||||
const nodeTestManifest = require('./data/versions-manifest.json');
|
||||
const nodeTestDist = require('./data/node-dist-index.json');
|
||||
const nodeTestDistNightly = require('./data/node-nightly-index.json');
|
||||
const nodeTestDistRc = require('./data/node-rc-index.json');
|
||||
const nodeV8CanaryTestDist = require('./data/v8-canary-dist-index.json');
|
||||
import nodeTestManifest from './data/versions-manifest.json';
|
||||
import nodeTestDist from './data/node-dist-index.json';
|
||||
import nodeTestDistNightly from './data/node-nightly-index.json';
|
||||
import nodeTestDistRc from './data/node-rc-index.json';
|
||||
import nodeV8CanaryTestDist from './data/v8-canary-dist-index.json';
|
||||
|
||||
describe('setup-node', () => {
|
||||
let inputs = {} as any;
|
||||
@ -95,13 +95,13 @@ describe('setup-node', () => {
|
||||
getJsonSpy.mockImplementation(url => {
|
||||
let res: any;
|
||||
if (url.includes('/rc')) {
|
||||
res = <INodeVersion>nodeTestDistRc;
|
||||
res = <INodeVersion[]>nodeTestDistRc;
|
||||
} else if (url.includes('/nightly')) {
|
||||
res = <INodeVersion>nodeTestDistNightly;
|
||||
res = <INodeVersion[]>nodeTestDistNightly;
|
||||
} else if (url.includes('/v8-canary')) {
|
||||
res = <INodeVersion>nodeV8CanaryTestDist;
|
||||
res = <INodeVersion[]>nodeV8CanaryTestDist;
|
||||
} else {
|
||||
res = <INodeVersion>nodeTestDist;
|
||||
res = <INodeVersion[]>nodeTestDist;
|
||||
}
|
||||
|
||||
return {result: res};
|
||||
@ -154,7 +154,7 @@ describe('setup-node', () => {
|
||||
os['arch'] = 'x64';
|
||||
inputs.stable = 'true';
|
||||
|
||||
let toolPath = path.normalize(
|
||||
const toolPath = path.normalize(
|
||||
'/cache/node/20.0.0-v8-canary20221103f7e2421e91/x64'
|
||||
);
|
||||
findSpy.mockImplementation(() => toolPath);
|
||||
@ -180,7 +180,7 @@ describe('setup-node', () => {
|
||||
|
||||
inSpy.mockImplementation(name => inputs[name]);
|
||||
|
||||
let toolPath = path.normalize(
|
||||
const toolPath = path.normalize(
|
||||
'/cache/node/20.0.0-v8-canary20221103f7e2421e91/x64'
|
||||
);
|
||||
findSpy.mockImplementation(() => toolPath);
|
||||
@ -192,13 +192,13 @@ describe('setup-node', () => {
|
||||
]);
|
||||
await main.run();
|
||||
|
||||
let expPath = path.join(toolPath, 'bin');
|
||||
const expPath = path.join(toolPath, 'bin');
|
||||
expect(cnSpy).toHaveBeenCalledWith(`::add-path::${expPath}${osm.EOL}`);
|
||||
});
|
||||
|
||||
it('handles unhandled find error and reports error', async () => {
|
||||
os.platform = 'linux';
|
||||
let errMsg = 'unhandled error message';
|
||||
const errMsg = 'unhandled error message';
|
||||
inputs['node-version'] = '20.0.0-v8-canary20221103f7e2421e91';
|
||||
|
||||
findSpy.mockImplementation(() => {
|
||||
@ -224,7 +224,7 @@ describe('setup-node', () => {
|
||||
os.arch = 'x64';
|
||||
|
||||
// a version which is not in the manifest but is in node dist
|
||||
let versionSpec = '11.15.0';
|
||||
const versionSpec = '11.15.0';
|
||||
|
||||
inputs['node-version'] = versionSpec;
|
||||
inputs['always-auth'] = false;
|
||||
@ -234,13 +234,13 @@ describe('setup-node', () => {
|
||||
findSpy.mockImplementation(() => '');
|
||||
|
||||
dlSpy.mockImplementation(async () => '/some/temp/path');
|
||||
let toolPath = path.normalize('/cache/node/11.11.0/x64');
|
||||
const toolPath = path.normalize('/cache/node/11.11.0/x64');
|
||||
exSpy.mockImplementation(async () => '/some/other/temp/path');
|
||||
cacheSpy.mockImplementation(async () => toolPath);
|
||||
|
||||
await main.run();
|
||||
|
||||
let expPath = path.join(toolPath, 'bin');
|
||||
const expPath = path.join(toolPath, 'bin');
|
||||
|
||||
expect(dlSpy).toHaveBeenCalled();
|
||||
expect(exSpy).toHaveBeenCalled();
|
||||
@ -257,7 +257,7 @@ describe('setup-node', () => {
|
||||
os.platform = 'linux';
|
||||
os.arch = 'x64';
|
||||
|
||||
let versionSpec = '23.0.0-v8-canary20221103f7e2421e91';
|
||||
const versionSpec = '23.0.0-v8-canary20221103f7e2421e91';
|
||||
inputs['node-version'] = versionSpec;
|
||||
|
||||
findSpy.mockImplementation(() => '');
|
||||
@ -275,12 +275,12 @@ describe('setup-node', () => {
|
||||
});
|
||||
|
||||
it('reports a failed download', async () => {
|
||||
let errMsg = 'unhandled download message';
|
||||
const errMsg = 'unhandled download message';
|
||||
os.platform = 'linux';
|
||||
os.arch = 'x64';
|
||||
|
||||
// a version which is in the manifest
|
||||
let versionSpec = '19.0.0-v8-canary';
|
||||
const versionSpec = '19.0.0-v8-canary';
|
||||
|
||||
inputs['node-version'] = versionSpec;
|
||||
inputs['always-auth'] = false;
|
||||
@ -327,14 +327,14 @@ describe('setup-node', () => {
|
||||
inputs['always-auth'] = false;
|
||||
inputs['token'] = 'faketoken';
|
||||
|
||||
let expectedUrl = `https://nodejs.org/download/v8-canary/v${version}/node-v${version}-${platform}-${arch}.${fileExtension}`;
|
||||
const expectedUrl = `https://nodejs.org/download/v8-canary/v${version}/node-v${version}-${platform}-${arch}.${fileExtension}`;
|
||||
|
||||
// ... but not in the local cache
|
||||
findSpy.mockImplementation(() => '');
|
||||
findAllVersionsSpy.mockImplementation(() => []);
|
||||
|
||||
dlSpy.mockImplementation(async () => '/some/temp/path');
|
||||
let toolPath = path.normalize(`/cache/node/${version}/${arch}`);
|
||||
const toolPath = path.normalize(`/cache/node/${version}/${arch}`);
|
||||
exSpy.mockImplementation(async () => '/some/other/temp/path');
|
||||
cacheSpy.mockImplementation(async () => toolPath);
|
||||
|
||||
@ -502,7 +502,7 @@ describe('setup-node', () => {
|
||||
|
||||
describe('setup-node v8 canary tests', () => {
|
||||
it('v8 canary setup node flow with cached', async () => {
|
||||
let versionSpec = 'v20-v8-canary';
|
||||
const versionSpec = 'v20-v8-canary';
|
||||
|
||||
inputs['node-version'] = versionSpec;
|
||||
inputs['always-auth'] = false;
|
||||
|
1
__tests__/data/.tool-versions-node
Normal file
@ -0,0 +1 @@
node 14.0.0
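This one-line fixture is presumably exercised by the node-version-file input, which can read an asdf-style .tool-versions entry in addition to .nvmrc and package.json. A rough sketch of pulling the node version out of such a file (illustrative; the function name is an assumption):

    // 'node 14.0.0' -> '14.0.0'; other tools and comment lines are ignored.
    export function parseNodeFromToolVersions(contents: string): string | undefined {
      for (const line of contents.split(/\r?\n/)) {
        const match = line.trim().match(/^node\s+v?(\S+)/);
        if (match) {
          return match[1];
        }
      }
      return undefined;
    }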
161
__tests__/data/pnpm-lock.yaml
generated
161
__tests__/data/pnpm-lock.yaml
generated
@ -7,9 +7,11 @@ dependencies:
|
||||
express: 4.17.1
|
||||
|
||||
packages:
|
||||
|
||||
/accepts/1.3.7:
|
||||
resolution: {integrity: sha512-Il80Qs2WjYlJIBNzNkK6KYqlVMTbZLXgHx2oT0pU/fjRHyEp+PEfEPY0R3WCwAGVOtauxh1hOxNgIf5bv7dQpA==}
|
||||
resolution:
|
||||
{
|
||||
integrity: sha512-Il80Qs2WjYlJIBNzNkK6KYqlVMTbZLXgHx2oT0pU/fjRHyEp+PEfEPY0R3WCwAGVOtauxh1hOxNgIf5bv7dQpA==
|
||||
}
|
||||
engines: {node: '>= 0.6'}
|
||||
dependencies:
|
||||
mime-types: 2.1.31
|
||||
@ -21,7 +23,10 @@ packages:
|
||||
dev: false
|
||||
|
||||
/body-parser/1.19.0:
|
||||
resolution: {integrity: sha512-dhEPs72UPbDnAQJ9ZKMNTP6ptJaionhP5cBb541nXPlW60Jepo9RV/a4fX4XWW9CuFNK22krhrj1+rgzifNCsw==}
|
||||
resolution:
|
||||
{
|
||||
integrity: sha512-dhEPs72UPbDnAQJ9ZKMNTP6ptJaionhP5cBb541nXPlW60Jepo9RV/a4fX4XWW9CuFNK22krhrj1+rgzifNCsw==
|
||||
}
|
||||
engines: {node: '>= 0.8'}
|
||||
dependencies:
|
||||
bytes: 3.1.0
|
||||
@ -37,19 +42,28 @@ packages:
|
||||
dev: false
|
||||
|
||||
/bytes/3.1.0:
|
||||
resolution: {integrity: sha512-zauLjrfCG+xvoyaqLoV8bLVXXNGC4JqlxFCutSDWA6fJrTo2ZuvLYTqZ7aHBLZSMOopbzwv8f+wZcVzfVTI2Dg==}
|
||||
resolution:
|
||||
{
|
||||
integrity: sha512-zauLjrfCG+xvoyaqLoV8bLVXXNGC4JqlxFCutSDWA6fJrTo2ZuvLYTqZ7aHBLZSMOopbzwv8f+wZcVzfVTI2Dg==
|
||||
}
|
||||
engines: {node: '>= 0.8'}
|
||||
dev: false
|
||||
|
||||
/content-disposition/0.5.3:
|
||||
resolution: {integrity: sha512-ExO0774ikEObIAEV9kDo50o+79VCUdEB6n6lzKgGwupcVeRlhrj3qGAfwq8G6uBJjkqLrhT0qEYFcWng8z1z0g==}
|
||||
resolution:
|
||||
{
|
||||
integrity: sha512-ExO0774ikEObIAEV9kDo50o+79VCUdEB6n6lzKgGwupcVeRlhrj3qGAfwq8G6uBJjkqLrhT0qEYFcWng8z1z0g==
|
||||
}
|
||||
engines: {node: '>= 0.6'}
|
||||
dependencies:
|
||||
safe-buffer: 5.1.2
|
||||
dev: false
|
||||
|
||||
/content-type/1.0.4:
|
||||
resolution: {integrity: sha512-hIP3EEPs8tB9AT1L+NUqtwOAps4mk2Zob89MWXMHjHWg9milF/j4osnnQLXBCBFBk/tvIG/tUc9mOUJiPBhPXA==}
|
||||
resolution:
|
||||
{
|
||||
integrity: sha512-hIP3EEPs8tB9AT1L+NUqtwOAps4mk2Zob89MWXMHjHWg9milF/j4osnnQLXBCBFBk/tvIG/tUc9mOUJiPBhPXA==
|
||||
}
|
||||
engines: {node: '>= 0.6'}
|
||||
dev: false
|
||||
|
||||
@ -58,12 +72,18 @@ packages:
|
||||
dev: false
|
||||
|
||||
/cookie/0.4.0:
|
||||
resolution: {integrity: sha512-+Hp8fLp57wnUSt0tY0tHEXh4voZRDnoIrZPqlo3DPiI4y9lwg/jqx+1Om94/W6ZaPDOUbnjOt/99w66zk+l1Xg==}
|
||||
resolution:
|
||||
{
|
||||
integrity: sha512-+Hp8fLp57wnUSt0tY0tHEXh4voZRDnoIrZPqlo3DPiI4y9lwg/jqx+1Om94/W6ZaPDOUbnjOt/99w66zk+l1Xg==
|
||||
}
|
||||
engines: {node: '>= 0.6'}
|
||||
dev: false
|
||||
|
||||
/debug/2.6.9:
|
||||
resolution: {integrity: sha512-bC7ElrdJaJnPbAP+1EotYvqZsb3ecl5wi6Bfi6BJTUcNowp6cvspg0jXznRTKDjm/E7AdgFBVeAPVMNcKGsHMA==}
|
||||
resolution:
|
||||
{
|
||||
integrity: sha512-bC7ElrdJaJnPbAP+1EotYvqZsb3ecl5wi6Bfi6BJTUcNowp6cvspg0jXznRTKDjm/E7AdgFBVeAPVMNcKGsHMA==
|
||||
}
|
||||
dependencies:
|
||||
ms: 2.0.0
|
||||
dev: false
|
||||
@ -96,7 +116,10 @@ packages:
|
||||
dev: false
|
||||
|
||||
/express/4.17.1:
|
||||
resolution: {integrity: sha512-mHJ9O79RqluphRrcw2X/GTh3k9tVv8YcoyY4Kkh4WDMUYKRZUq0h1o0w2rrrxBqM7VoeUVqgb27xlEMXTnYt4g==}
|
||||
resolution:
|
||||
{
|
||||
integrity: sha512-mHJ9O79RqluphRrcw2X/GTh3k9tVv8YcoyY4Kkh4WDMUYKRZUq0h1o0w2rrrxBqM7VoeUVqgb27xlEMXTnYt4g==
|
||||
}
|
||||
engines: {node: '>= 0.10.0'}
|
||||
dependencies:
|
||||
accepts: 1.3.7
|
||||
@ -132,7 +155,10 @@ packages:
|
||||
dev: false
|
||||
|
||||
/finalhandler/1.1.2:
|
||||
resolution: {integrity: sha512-aAWcW57uxVNrQZqFXjITpW3sIUQmHGG3qSb9mUah9MgMC4NeWhNOlNjXEYq3HjRAvL6arUviZGGJsBg6z0zsWA==}
|
||||
resolution:
|
||||
{
|
||||
integrity: sha512-aAWcW57uxVNrQZqFXjITpW3sIUQmHGG3qSb9mUah9MgMC4NeWhNOlNjXEYq3HjRAvL6arUviZGGJsBg6z0zsWA==
|
||||
}
|
||||
engines: {node: '>= 0.8'}
|
||||
dependencies:
|
||||
debug: 2.6.9
|
||||
@ -145,7 +171,10 @@ packages:
|
||||
dev: false
|
||||
|
||||
/forwarded/0.2.0:
|
||||
resolution: {integrity: sha512-buRG0fpBtRHSTCOASe6hD258tEubFoRLb4ZNA6NxMVHNw2gOcwHo9wyablzMzOA5z9xA9L1KNjk/Nt6MT9aYow==}
|
||||
resolution:
|
||||
{
|
||||
integrity: sha512-buRG0fpBtRHSTCOASe6hD258tEubFoRLb4ZNA6NxMVHNw2gOcwHo9wyablzMzOA5z9xA9L1KNjk/Nt6MT9aYow==
|
||||
}
|
||||
engines: {node: '>= 0.6'}
|
||||
dev: false
|
||||
|
||||
@ -155,7 +184,10 @@ packages:
|
||||
dev: false
|
||||
|
||||
/http-errors/1.7.2:
|
||||
resolution: {integrity: sha512-uUQBt3H/cSIVfch6i1EuPNy/YsRSOUBXTVfZ+yR7Zjez3qjBz6i9+i4zjNaoqcoFVI4lQJ5plg63TvGfRSDCRg==}
|
||||
resolution:
|
||||
{
|
||||
integrity: sha512-uUQBt3H/cSIVfch6i1EuPNy/YsRSOUBXTVfZ+yR7Zjez3qjBz6i9+i4zjNaoqcoFVI4lQJ5plg63TvGfRSDCRg==
|
||||
}
|
||||
engines: {node: '>= 0.6'}
|
||||
dependencies:
|
||||
depd: 1.1.2
|
||||
@ -166,7 +198,10 @@ packages:
|
||||
dev: false
|
||||
|
||||
/http-errors/1.7.3:
|
||||
resolution: {integrity: sha512-ZTTX0MWrsQ2ZAhA1cejAwDLycFsd7I7nVtnkT3Ol0aqodaKW+0CTZDQ1uBv5whptCnc8e8HeRRJxRs0kmm/Qfw==}
|
||||
resolution:
|
||||
{
|
||||
integrity: sha512-ZTTX0MWrsQ2ZAhA1cejAwDLycFsd7I7nVtnkT3Ol0aqodaKW+0CTZDQ1uBv5whptCnc8e8HeRRJxRs0kmm/Qfw==
|
||||
}
|
||||
engines: {node: '>= 0.6'}
|
||||
dependencies:
|
||||
depd: 1.1.2
|
||||
@ -177,7 +212,10 @@ packages:
|
||||
dev: false
|
||||
|
||||
/iconv-lite/0.4.24:
|
||||
resolution: {integrity: sha512-v3MXnZAcvnywkTUEZomIActle7RXXeedOR31wwl7VlyoXO4Qi9arvSenNQWne1TcRwhCL1HwLI21bEqdpj8/rA==}
|
||||
resolution:
|
||||
{
|
||||
integrity: sha512-v3MXnZAcvnywkTUEZomIActle7RXXeedOR31wwl7VlyoXO4Qi9arvSenNQWne1TcRwhCL1HwLI21bEqdpj8/rA==
|
||||
}
|
||||
engines: {node: '>=0.10.0'}
|
||||
dependencies:
|
||||
safer-buffer: 2.1.2
|
||||
@ -188,11 +226,17 @@ packages:
|
||||
dev: false
|
||||
|
||||
/inherits/2.0.4:
|
||||
resolution: {integrity: sha512-k/vGaX4/Yla3WzyMCvTQOXYeIHvqOKtnqBduzTHpzpQZzAskKMhZ2K+EnBiSM9zGSoIFeMpXKxa4dYeZIQqewQ==}
|
||||
resolution:
|
||||
{
|
||||
integrity: sha512-k/vGaX4/Yla3WzyMCvTQOXYeIHvqOKtnqBduzTHpzpQZzAskKMhZ2K+EnBiSM9zGSoIFeMpXKxa4dYeZIQqewQ==
|
||||
}
|
||||
dev: false
|
||||
|
||||
/ipaddr.js/1.9.1:
|
||||
resolution: {integrity: sha512-0KI/607xoxSToH7GjN1FfSbLoU0+btTicjsQSWQlh/hZykN8KpmMf7uYwPW3R+akZ6R/w18ZlXSHBYXiYUPO3g==}
|
||||
resolution:
|
||||
{
|
||||
integrity: sha512-0KI/607xoxSToH7GjN1FfSbLoU0+btTicjsQSWQlh/hZykN8KpmMf7uYwPW3R+akZ6R/w18ZlXSHBYXiYUPO3g==
|
||||
}
|
||||
engines: {node: '>= 0.10'}
|
||||
dev: false
|
||||
|
||||
@ -211,19 +255,28 @@ packages:
|
||||
dev: false
|
||||
|
||||
/mime-db/1.48.0:
|
||||
resolution: {integrity: sha512-FM3QwxV+TnZYQ2aRqhlKBMHxk10lTbMt3bBkMAp54ddrNeVSfcQYOOKuGuy3Ddrm38I04If834fOUSq1yzslJQ==}
|
||||
resolution:
|
||||
{
|
||||
integrity: sha512-FM3QwxV+TnZYQ2aRqhlKBMHxk10lTbMt3bBkMAp54ddrNeVSfcQYOOKuGuy3Ddrm38I04If834fOUSq1yzslJQ==
|
||||
}
|
||||
engines: {node: '>= 0.6'}
|
||||
dev: false
|
||||
|
||||
/mime-types/2.1.31:
|
||||
resolution: {integrity: sha512-XGZnNzm3QvgKxa8dpzyhFTHmpP3l5YNusmne07VUOXxou9CqUqYa/HBy124RqtVh/O2pECas/MOcsDgpilPOPg==}
|
||||
resolution:
|
||||
{
|
||||
integrity: sha512-XGZnNzm3QvgKxa8dpzyhFTHmpP3l5YNusmne07VUOXxou9CqUqYa/HBy124RqtVh/O2pECas/MOcsDgpilPOPg==
|
||||
}
|
||||
engines: {node: '>= 0.6'}
|
||||
dependencies:
|
||||
mime-db: 1.48.0
|
||||
dev: false
|
||||
|
||||
/mime/1.6.0:
|
||||
resolution: {integrity: sha512-x0Vn8spI+wuJ1O6S7gnbaQg8Pxh4NNHb7KSINmEWKiPE4RKOplvijn+NkmYmmRgP68mc70j2EbeTFRsrswaQeg==}
|
||||
resolution:
|
||||
{
|
||||
integrity: sha512-x0Vn8spI+wuJ1O6S7gnbaQg8Pxh4NNHb7KSINmEWKiPE4RKOplvijn+NkmYmmRgP68mc70j2EbeTFRsrswaQeg==
|
||||
}
|
||||
engines: {node: '>=4'}
|
||||
hasBin: true
|
||||
dev: false
|
||||
@ -233,11 +286,17 @@ packages:
|
||||
dev: false
|
||||
|
||||
/ms/2.1.1:
|
||||
resolution: {integrity: sha512-tgp+dl5cGk28utYktBsrFqA7HKgrhgPsg6Z/EfhWI4gl1Hwq8B/GmY/0oXZ6nF8hDVesS/FpnYaD/kOWhYQvyg==}
|
||||
resolution:
|
||||
{
|
||||
integrity: sha512-tgp+dl5cGk28utYktBsrFqA7HKgrhgPsg6Z/EfhWI4gl1Hwq8B/GmY/0oXZ6nF8hDVesS/FpnYaD/kOWhYQvyg==
|
||||
}
|
||||
dev: false
|
||||
|
||||
/negotiator/0.6.2:
|
||||
resolution: {integrity: sha512-hZXc7K2e+PgeI1eDBe/10Ard4ekbfrrqG8Ep+8Jmf4JID2bNg7NvCPOZN+kfF574pFQI7mum2AUqDidoKqcTOw==}
|
||||
resolution:
|
||||
{
|
||||
integrity: sha512-hZXc7K2e+PgeI1eDBe/10Ard4ekbfrrqG8Ep+8Jmf4JID2bNg7NvCPOZN+kfF574pFQI7mum2AUqDidoKqcTOw==
|
||||
}
|
||||
engines: {node: '>= 0.6'}
|
||||
dev: false
|
||||
|
||||
@ -249,7 +308,10 @@ packages:
|
||||
dev: false
|
||||
|
||||
/parseurl/1.3.3:
|
||||
resolution: {integrity: sha512-CiyeOxFT/JZyN5m0z9PfXw4SCBJ6Sygz1Dpl0wqjlhDEGGBP1GnsUVEL0p63hoG1fcj3fHynXi9NYO4nWOL+qQ==}
|
||||
resolution:
|
||||
{
|
||||
integrity: sha512-CiyeOxFT/JZyN5m0z9PfXw4SCBJ6Sygz1Dpl0wqjlhDEGGBP1GnsUVEL0p63hoG1fcj3fHynXi9NYO4nWOL+qQ==
|
||||
}
|
||||
engines: {node: '>= 0.8'}
|
||||
dev: false
|
||||
|
||||
@ -258,7 +320,10 @@ packages:
|
||||
dev: false
|
||||
|
||||
/proxy-addr/2.0.7:
|
||||
resolution: {integrity: sha512-llQsMLSUDUPT44jdrU/O37qlnifitDP+ZwrmmZcoSKyLKvtZxpyV0n2/bD/N4tBAAZ/gJEdZU7KMraoK1+XYAg==}
|
||||
resolution:
|
||||
{
|
||||
integrity: sha512-llQsMLSUDUPT44jdrU/O37qlnifitDP+ZwrmmZcoSKyLKvtZxpyV0n2/bD/N4tBAAZ/gJEdZU7KMraoK1+XYAg==
|
||||
}
|
||||
engines: {node: '>= 0.10'}
|
||||
dependencies:
|
||||
forwarded: 0.2.0
|
||||
@ -266,17 +331,26 @@ packages:
|
||||
dev: false
|
||||
|
||||
/qs/6.7.0:
|
||||
resolution: {integrity: sha512-VCdBRNFTX1fyE7Nb6FYoURo/SPe62QCaAyzJvUjwRaIsc+NePBEniHlvxFmmX56+HZphIGtV0XeCirBtpDrTyQ==}
|
||||
resolution:
|
||||
{
|
||||
integrity: sha512-VCdBRNFTX1fyE7Nb6FYoURo/SPe62QCaAyzJvUjwRaIsc+NePBEniHlvxFmmX56+HZphIGtV0XeCirBtpDrTyQ==
|
||||
}
|
||||
engines: {node: '>=0.6'}
|
||||
dev: false
|
||||
|
||||
/range-parser/1.2.1:
|
||||
resolution: {integrity: sha512-Hrgsx+orqoygnmhFbKaHE6c296J+HTAQXoxEF6gNupROmmGJRoyzfG3ccAveqCBrwr/2yxQ5BVd/GTl5agOwSg==}
|
||||
resolution:
|
||||
{
|
||||
integrity: sha512-Hrgsx+orqoygnmhFbKaHE6c296J+HTAQXoxEF6gNupROmmGJRoyzfG3ccAveqCBrwr/2yxQ5BVd/GTl5agOwSg==
|
||||
}
|
||||
engines: {node: '>= 0.6'}
|
||||
dev: false
|
||||
|
||||
/raw-body/2.4.0:
|
||||
resolution: {integrity: sha512-4Oz8DUIwdvoa5qMJelxipzi/iJIi40O5cGV1wNYp5hvZP8ZN0T+jiNkL0QepXs+EsQ9XJ8ipEDoiH70ySUJP3Q==}
|
||||
resolution:
|
||||
{
|
||||
integrity: sha512-4Oz8DUIwdvoa5qMJelxipzi/iJIi40O5cGV1wNYp5hvZP8ZN0T+jiNkL0QepXs+EsQ9XJ8ipEDoiH70ySUJP3Q==
|
||||
}
|
||||
engines: {node: '>= 0.8'}
|
||||
dependencies:
|
||||
bytes: 3.1.0
|
||||
@ -286,15 +360,24 @@ packages:
|
||||
dev: false
|
||||
|
||||
/safe-buffer/5.1.2:
|
||||
resolution: {integrity: sha512-Gd2UZBJDkXlY7GbJxfsE8/nvKkUEU1G38c1siN6QP6a9PT9MmHB8GnpscSmMJSoF8LOIrt8ud/wPtojys4G6+g==}
|
||||
resolution:
|
||||
{
|
||||
integrity: sha512-Gd2UZBJDkXlY7GbJxfsE8/nvKkUEU1G38c1siN6QP6a9PT9MmHB8GnpscSmMJSoF8LOIrt8ud/wPtojys4G6+g==
|
||||
}
|
||||
dev: false
|
||||
|
||||
/safer-buffer/2.1.2:
|
||||
resolution: {integrity: sha512-YZo3K82SD7Riyi0E1EQPojLz7kpepnSQI9IyPbHHg1XXXevb5dJI7tpyN2ADxGcQbHG7vcyRHk0cbwqcQriUtg==}
|
||||
resolution:
|
||||
{
|
||||
integrity: sha512-YZo3K82SD7Riyi0E1EQPojLz7kpepnSQI9IyPbHHg1XXXevb5dJI7tpyN2ADxGcQbHG7vcyRHk0cbwqcQriUtg==
|
||||
}
|
||||
dev: false
|
||||
|
||||
/send/0.17.1:
|
||||
resolution: {integrity: sha512-BsVKsiGcQMFwT8UxypobUKyv7irCNRHk1T0G680vk88yf6LBByGcZJOTJCrTP2xVN6yI+XjPJcNuE3V4fT9sAg==}
|
||||
resolution:
|
||||
{
|
||||
integrity: sha512-BsVKsiGcQMFwT8UxypobUKyv7irCNRHk1T0G680vk88yf6LBByGcZJOTJCrTP2xVN6yI+XjPJcNuE3V4fT9sAg==
|
||||
}
|
||||
engines: {node: '>= 0.8.0'}
|
||||
dependencies:
|
||||
debug: 2.6.9
|
||||
@ -313,7 +396,10 @@ packages:
|
||||
dev: false
|
||||
|
||||
/serve-static/1.14.1:
|
||||
resolution: {integrity: sha512-JMrvUwE54emCYWlTI+hGrGv5I8dEwmco/00EvkzIIsR7MqrHonbD9pO2MOfFnpFntl7ecpZs+3mW+XbQZu9QCg==}
|
||||
resolution:
|
||||
{
|
||||
integrity: sha512-JMrvUwE54emCYWlTI+hGrGv5I8dEwmco/00EvkzIIsR7MqrHonbD9pO2MOfFnpFntl7ecpZs+3mW+XbQZu9QCg==
|
||||
}
|
||||
engines: {node: '>= 0.8.0'}
|
||||
dependencies:
|
||||
encodeurl: 1.0.2
|
||||
@ -323,7 +409,10 @@ packages:
|
||||
dev: false
|
||||
|
||||
/setprototypeof/1.1.1:
|
||||
resolution: {integrity: sha512-JvdAWfbXeIGaZ9cILp38HntZSFSo3mWg6xGcJJsd+d4aRMOqauag1C63dJfDw7OaMYwEbHMOxEZ1lqVRYP2OAw==}
|
||||
resolution:
|
||||
{
|
||||
integrity: sha512-JvdAWfbXeIGaZ9cILp38HntZSFSo3mWg6xGcJJsd+d4aRMOqauag1C63dJfDw7OaMYwEbHMOxEZ1lqVRYP2OAw==
|
||||
}
|
||||
dev: false
|
||||
|
||||
/statuses/1.5.0:
|
||||
@ -332,12 +421,18 @@ packages:
|
||||
dev: false
|
||||
|
||||
/toidentifier/1.0.0:
|
||||
resolution: {integrity: sha512-yaOH/Pk/VEhBWWTlhI+qXxDFXlejDGcQipMlyxda9nthulaxLZUNcUqFxokp0vcYnvteJln5FNQDRrxj3YcbVw==}
|
||||
resolution:
|
||||
{
|
||||
integrity: sha512-yaOH/Pk/VEhBWWTlhI+qXxDFXlejDGcQipMlyxda9nthulaxLZUNcUqFxokp0vcYnvteJln5FNQDRrxj3YcbVw==
|
||||
}
|
||||
engines: {node: '>=0.6'}
|
||||
dev: false
|
||||
|
||||
/type-is/1.6.18:
|
||||
resolution: {integrity: sha512-TkRKr9sUTxEH8MdfuCSP7VizJyzRNMjj2J2do2Jr3Kym598JVdEksuzPQCnlFPW4ky9Q+iA+ma9BGm06XQBy8g==}
|
||||
resolution:
|
||||
{
|
||||
integrity: sha512-TkRKr9sUTxEH8MdfuCSP7VizJyzRNMjj2J2do2Jr3Kym598JVdEksuzPQCnlFPW4ky9Q+iA+ma9BGm06XQBy8g==
|
||||
}
|
||||
engines: {node: '>= 0.6'}
|
||||
dependencies:
|
||||
media-typer: 0.3.0
|
||||
|
@ -266,7 +266,7 @@ describe('main tests', () => {

    inSpy.mockImplementation(name => inputs[name]);

    let toolPath = path.normalize('/cache/node/12.16.1/x64');
    const toolPath = path.normalize('/cache/node/12.16.1/x64');
    findSpy.mockImplementation(() => toolPath);

    // expect(logSpy).toHaveBeenCalledWith(`Found in cache @ ${toolPath}`);
@ -286,7 +286,7 @@ describe('main tests', () => {

    inSpy.mockImplementation(name => inputs[name]);

    let toolPath = path.normalize('/cache/node/12.16.1/x64');
    const toolPath = path.normalize('/cache/node/12.16.1/x64');
    findSpy.mockImplementation(() => toolPath);

    // expect(logSpy).toHaveBeenCalledWith(`Found in cache @ ${toolPath}`);
18
__tests__/mock/glob-mock.test.ts
Normal file
@ -0,0 +1,18 @@
import {MockGlobber} from './glob-mock';

describe('mocked globber tests', () => {
  it('globber should return generator', async () => {
    const globber = new MockGlobber(['aaa', 'bbb', 'ccc']);
    const generator = globber.globGenerator();
    const result: string[] = [];
    for await (const itemPath of generator) {
      result.push(itemPath);
    }
    expect(result).toEqual(['aaa', 'bbb', 'ccc']);
  });
  it('globber should return glob', async () => {
    const globber = new MockGlobber(['aaa', 'bbb', 'ccc']);
    const result: string[] = await globber.glob();
    expect(result).toEqual(['aaa', 'bbb', 'ccc']);
  });
});
29
__tests__/mock/glob-mock.ts
Normal file
@ -0,0 +1,29 @@
import {Globber} from '@actions/glob';

export class MockGlobber implements Globber {
  private readonly expected: string[];
  constructor(expected: string[]) {
    this.expected = expected;
  }
  getSearchPaths(): string[] {
    return this.expected.slice();
  }

  async glob(): Promise<string[]> {
    const result: string[] = [];
    for await (const itemPath of this.globGenerator()) {
      result.push(itemPath);
    }
    return result;
  }

  async *globGenerator(): AsyncGenerator<string, void> {
    for (const e of this.expected) {
      yield e;
    }
  }

  static async create(expected: string[]): Promise<MockGlobber> {
    return new MockGlobber(expected);
  }
}
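For reference, the cache-utils tests earlier in this diff plug MockGlobber in by spying on @actions/glob's create(), along these lines:

    // Make glob.create() resolve to a globber that yields fixed paths.
    import * as glob from '@actions/glob';
    import {Globber} from '@actions/glob';
    import {MockGlobber} from './glob-mock';

    const globCreateSpy = jest.spyOn(glob, 'create');
    globCreateSpy.mockImplementation(
      (pattern: string): Promise<Globber> => MockGlobber.create(['/foo', '/bar'])
    );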
@ -12,11 +12,11 @@ import * as main from '../src/main';
|
||||
import * as auth from '../src/authutil';
|
||||
import {INodeVersion} from '../src/distributions/base-models';
|
||||
|
||||
const nodeTestManifest = require('./data/versions-manifest.json');
|
||||
const nodeTestDist = require('./data/node-dist-index.json');
|
||||
const nodeTestDistNightly = require('./data/node-nightly-index.json');
|
||||
const nodeTestDistRc = require('./data/node-rc-index.json');
|
||||
const nodeV8CanaryTestDist = require('./data/v8-canary-dist-index.json');
|
||||
import nodeTestManifest from './data/versions-manifest.json';
|
||||
import nodeTestDist from './data/node-dist-index.json';
|
||||
import nodeTestDistNightly from './data/node-nightly-index.json';
|
||||
import nodeTestDistRc from './data/node-rc-index.json';
|
||||
import nodeV8CanaryTestDist from './data/v8-canary-dist-index.json';
|
||||
|
||||
describe('setup-node', () => {
|
||||
let inputs = {} as any;
|
||||
@ -39,6 +39,7 @@ describe('setup-node', () => {
|
||||
let whichSpy: jest.SpyInstance;
|
||||
let existsSpy: jest.SpyInstance;
|
||||
let mkdirpSpy: jest.SpyInstance;
|
||||
let cpSpy: jest.SpyInstance;
|
||||
let execSpy: jest.SpyInstance;
|
||||
let authSpy: jest.SpyInstance;
|
||||
let parseNodeVersionSpy: jest.SpyInstance;
|
||||
@ -51,6 +52,7 @@ describe('setup-node', () => {
|
||||
console.log('::stop-commands::stoptoken'); // Disable executing of runner commands when running tests in actions
|
||||
process.env['GITHUB_PATH'] = ''; // Stub out ENV file functionality so we can verify it writes to standard out
|
||||
process.env['GITHUB_OUTPUT'] = ''; // Stub out ENV file functionality so we can verify it writes to standard out
|
||||
process.env['RUNNER_TEMP'] = '/runner_temp';
|
||||
inputs = {};
|
||||
inSpy = jest.spyOn(core, 'getInput');
|
||||
inSpy.mockImplementation(name => inputs[name]);
|
||||
@ -78,6 +80,7 @@ describe('setup-node', () => {
|
||||
whichSpy = jest.spyOn(io, 'which');
|
||||
existsSpy = jest.spyOn(fs, 'existsSync');
|
||||
mkdirpSpy = jest.spyOn(io, 'mkdirP');
|
||||
cpSpy = jest.spyOn(io, 'cp');
|
||||
|
||||
// @actions/tool-cache
|
||||
isCacheActionAvailable = jest.spyOn(cache, 'isFeatureAvailable');
|
||||
@ -89,11 +92,13 @@ describe('setup-node', () => {
|
||||
getJsonSpy.mockImplementation(url => {
|
||||
let res: any;
|
||||
if (url.includes('/rc')) {
|
||||
res = <INodeVersion>nodeTestDistRc;
|
||||
res = <INodeVersion[]>nodeTestDistRc;
|
||||
} else if (url.includes('/nightly')) {
|
||||
res = <INodeVersion>nodeTestDistNightly;
|
||||
res = <INodeVersion[]>nodeTestDistNightly;
|
||||
} else if (url.includes('/v8-canary')) {
|
||||
res = <INodeVersion[]>nodeV8CanaryTestDist;
|
||||
} else {
|
||||
res = <INodeVersion>nodeTestDist;
|
||||
res = <INodeVersion[]>nodeTestDist;
|
||||
}
|
||||
|
||||
return {result: res};
|
||||
@ -146,7 +151,7 @@ describe('setup-node', () => {
|
||||
os['arch'] = 'x64';
|
||||
inputs.stable = 'true';
|
||||
|
||||
let toolPath = path.normalize(
|
||||
const toolPath = path.normalize(
|
||||
'/cache/node/16.0.0-nightly20210417bc31dc0e0f/x64'
|
||||
);
|
||||
findSpy.mockImplementation(() => toolPath);
|
||||
@ -172,7 +177,7 @@ describe('setup-node', () => {
|
||||
os['arch'] = 'x64';
|
||||
inputs.stable = 'false';
|
||||
|
||||
let toolPath = path.normalize(
|
||||
const toolPath = path.normalize(
|
||||
'/cache/node/16.0.0-nightly20210415c3a5e15ebe/x64'
|
||||
);
|
||||
findSpy.mockImplementation(() => toolPath);
|
||||
@ -199,7 +204,7 @@ describe('setup-node', () => {
|
||||
|
||||
inSpy.mockImplementation(name => inputs[name]);
|
||||
|
||||
let toolPath = path.normalize(
|
||||
const toolPath = path.normalize(
|
||||
'/cache/node/16.0.0-nightly20210417bc31dc0e0f/x64'
|
||||
);
|
||||
findSpy.mockImplementation(() => toolPath);
|
||||
@ -218,12 +223,12 @@ describe('setup-node', () => {
|
||||
'x64'
|
||||
);
|
||||
|
||||
let expPath = path.join(toolPath, 'bin');
|
||||
const expPath = path.join(toolPath, 'bin');
|
||||
expect(cnSpy).toHaveBeenCalledWith(`::add-path::${expPath}${osm.EOL}`);
|
||||
});
|
||||
|
||||
it('handles unhandled find error and reports error', async () => {
|
||||
let errMsg = 'unhandled error message';
|
||||
const errMsg = 'unhandled error message';
|
||||
inputs['node-version'] = '16.0.0-nightly20210417bc31dc0e0f';
|
||||
|
||||
findAllVersionsSpy.mockImplementation(() => [
|
||||
@ -247,7 +252,7 @@ describe('setup-node', () => {
|
||||
os.arch = 'x64';
|
||||
|
||||
// a version which is not in the manifest but is in node dist
|
||||
let versionSpec = '13.13.1-nightly20200415947ddec091';
|
||||
const versionSpec = '13.13.1-nightly20200415947ddec091';
|
||||
|
||||
inputs['node-version'] = versionSpec;
|
||||
inputs['always-auth'] = false;
|
||||
@ -258,7 +263,7 @@ describe('setup-node', () => {
|
||||
findAllVersionsSpy.mockImplementation(() => []);
|
||||
|
||||
dlSpy.mockImplementation(async () => '/some/temp/path');
|
||||
let toolPath = path.normalize(
|
||||
const toolPath = path.normalize(
|
||||
'/cache/node/13.13.1-nightly20200415947ddec091/x64'
|
||||
);
|
||||
exSpy.mockImplementation(async () => '/some/other/temp/path');
|
||||
@ -266,18 +271,104 @@ describe('setup-node', () => {
|
||||
|
||||
await main.run();
|
||||
|
||||
let expPath = path.join(toolPath, 'bin');
|
||||
const expPath = path.join(toolPath, 'bin');
|
||||
|
||||
expect(dlSpy).toHaveBeenCalled();
|
||||
expect(exSpy).toHaveBeenCalled();
|
||||
expect(cnSpy).toHaveBeenCalledWith(`::add-path::${expPath}${osm.EOL}`);
|
||||
});
|
||||
|
||||
it('windows: falls back to exe version if not in manifest and not in node dist', async () => {
|
||||
os.platform = 'win32';
|
||||
os.arch = 'x64';
|
||||
|
||||
// a version which is not in the manifest but is in node dist
|
||||
const versionSpec = '13.13.1-nightly20200415947ddec091';
|
||||
|
||||
const workingUrls = [
|
||||
`https://nodejs.org/download/nightly/v${versionSpec}/win-x64/node.exe`,
|
||||
`https://nodejs.org/download/nightly/v${versionSpec}/win-x64/node.lib`
|
||||
];
|
||||
|
||||
inputs['node-version'] = versionSpec;
|
||||
inputs['always-auth'] = false;
|
||||
inputs['token'] = 'faketoken';
|
||||
|
||||
// ... but not in the local cache
|
||||
findSpy.mockImplementation(() => '');
|
||||
findAllVersionsSpy.mockImplementation(() => []);
|
||||
|
||||
dlSpy.mockImplementation(async url => {
|
||||
if (workingUrls.includes(url)) {
|
||||
return '/some/temp/path';
|
||||
}
|
||||
|
||||
throw new tc.HTTPError(404);
|
||||
});
|
||||
const toolPath = path.normalize(
|
||||
'/cache/node/13.13.1-nightly20200415947ddec091/x64'
|
||||
);
|
||||
cacheSpy.mockImplementation(async () => toolPath);
|
||||
mkdirpSpy.mockImplementation(async () => {});
|
||||
cpSpy.mockImplementation(async () => {});
|
||||
|
||||
await main.run();
|
||||
|
||||
workingUrls.forEach(url => {
|
||||
expect(dlSpy).toHaveBeenCalledWith(url);
|
||||
});
|
||||
expect(cnSpy).toHaveBeenCalledWith(`::add-path::${toolPath}${osm.EOL}`);
|
||||
});
|
||||
|
||||
it('linux: does not fall back to exe version if not in manifest and not in node dist', async () => {
|
||||
os.platform = 'linux';
|
||||
os.arch = 'x64';
|
||||
|
||||
// a version which is not in the manifest but is in node dist
|
||||
const versionSpec = '13.13.1-nightly20200415947ddec091';
|
||||
|
||||
const workingUrls = [
|
||||
`https://nodejs.org/download/nightly/v${versionSpec}/win-x64/node.exe`,
|
||||
`https://nodejs.org/download/nightly/v${versionSpec}/win-x64/node.lib`
|
||||
];
|
||||
|
||||
inputs['node-version'] = versionSpec;
|
||||
inputs['always-auth'] = false;
|
||||
inputs['token'] = 'faketoken';
|
||||
|
||||
// ... but not in the local cache
|
||||
findSpy.mockImplementation(() => '');
|
||||
findAllVersionsSpy.mockImplementation(() => []);
|
||||
|
||||
dlSpy.mockImplementation(async url => {
|
||||
if (workingUrls.includes(url)) {
|
||||
return '/some/temp/path';
|
||||
}
|
||||
|
||||
throw new tc.HTTPError(404);
|
||||
});
|
||||
const toolPath = path.normalize(
|
||||
'/cache/node/13.13.1-nightly20200415947ddec091/x64'
|
||||
);
|
||||
cacheSpy.mockImplementation(async () => toolPath);
|
||||
mkdirpSpy.mockImplementation(async () => {});
|
||||
cpSpy.mockImplementation(async () => {});
|
||||
|
||||
await main.run();
|
||||
|
||||
workingUrls.forEach(url => {
|
||||
expect(dlSpy).not.toHaveBeenCalledWith(url);
|
||||
});
|
||||
expect(cnSpy).toHaveBeenCalledWith(
|
||||
`::error::Unexpected HTTP response: 404${osm.EOL}`
|
||||
);
|
||||
});
|
||||
|
||||
it('does not find a version that does not exist', async () => {
|
||||
os.platform = 'linux';
|
||||
os.arch = 'x64';
|
||||
|
||||
let versionSpec = '10.13.1-nightly20200415947ddec091';
|
||||
const versionSpec = '10.13.1-nightly20200415947ddec091';
|
||||
inputs['node-version'] = versionSpec;
|
||||
|
||||
findSpy.mockImplementation(() => '');
|
||||
@ -290,12 +381,12 @@ describe('setup-node', () => {
|
||||
});
|
||||
|
||||
it('reports a failed download', async () => {
|
||||
let errMsg = 'unhandled download message';
|
||||
const errMsg = 'unhandled download message';
|
||||
os.platform = 'linux';
|
||||
os.arch = 'x64';
|
||||
|
||||
// a version which is in the manifest
|
||||
let versionSpec = '18.0.0-nightly202204180699150267';
|
||||
const versionSpec = '18.0.0-nightly202204180699150267';
|
||||
|
||||
inputs['node-version'] = versionSpec;
|
||||
inputs['always-auth'] = false;
|
||||
@ -339,14 +430,14 @@ describe('setup-node', () => {
|
||||
inputs['always-auth'] = false;
|
||||
inputs['token'] = 'faketoken';
|
||||
|
||||
let expectedUrl = `https://nodejs.org/download/nightly/v${version}/node-v${version}-${platform}-${arch}.${fileExtension}`;
|
||||
const expectedUrl = `https://nodejs.org/download/nightly/v${version}/node-v${version}-${platform}-${arch}.${fileExtension}`;
|
||||
|
||||
// ... but not in the local cache
|
||||
findSpy.mockImplementation(() => '');
|
||||
findAllVersionsSpy.mockImplementation(() => []);
|
||||
|
||||
dlSpy.mockImplementation(async () => '/some/temp/path');
|
||||
let toolPath = path.normalize(`/cache/node/${version}/${arch}`);
|
||||
const toolPath = path.normalize(`/cache/node/${version}/${arch}`);
|
||||
exSpy.mockImplementation(async () => '/some/other/temp/path');
|
||||
cacheSpy.mockImplementation(async () => toolPath);
|
||||
|
||||
|
@ -13,11 +13,11 @@ import * as auth from '../src/authutil';
|
||||
import OfficialBuilds from '../src/distributions/official_builds/official_builds';
|
||||
import {INodeVersion} from '../src/distributions/base-models';
|
||||
|
||||
const nodeTestManifest = require('./data/versions-manifest.json');
|
||||
const nodeTestDist = require('./data/node-dist-index.json');
|
||||
const nodeTestDistNightly = require('./data/node-nightly-index.json');
|
||||
const nodeTestDistRc = require('./data/node-rc-index.json');
|
||||
const nodeV8CanaryTestDist = require('./data/v8-canary-dist-index.json');
|
||||
import nodeTestManifest from './data/versions-manifest.json';
|
||||
import nodeTestDist from './data/node-dist-index.json';
|
||||
import nodeTestDistNightly from './data/node-nightly-index.json';
|
||||
import nodeTestDistRc from './data/node-rc-index.json';
|
||||
import nodeV8CanaryTestDist from './data/v8-canary-dist-index.json';
|
||||
|
||||
describe('setup-node', () => {
|
||||
let build: OfficialBuilds;
|
||||
@ -95,11 +95,11 @@ describe('setup-node', () => {
|
||||
getJsonSpy.mockImplementation(url => {
|
||||
let res: any;
|
||||
if (url.includes('/rc')) {
|
||||
res = <INodeVersion>nodeTestDistRc;
|
||||
res = <INodeVersion[]>nodeTestDistRc;
|
||||
} else if (url.includes('/nightly')) {
|
||||
res = <INodeVersion>nodeTestDistNightly;
|
||||
res = <INodeVersion[]>nodeTestDistNightly;
|
||||
} else {
|
||||
res = <INodeVersion>nodeTestDist;
|
||||
res = <INodeVersion[]>nodeTestDist;
|
||||
}
|
||||
|
||||
return {result: res};
|
||||
@ -156,13 +156,13 @@ describe('setup-node', () => {
|
||||
async (versionSpec, platform, expectedVersion, expectedLts) => {
|
||||
os.platform = platform;
|
||||
os.arch = 'x64';
|
||||
let versions: tc.IToolRelease[] | null = await tc.getManifestFromRepo(
|
||||
const versions: tc.IToolRelease[] | null = await tc.getManifestFromRepo(
|
||||
'actions',
|
||||
'node-versions',
|
||||
'mocktoken'
|
||||
);
|
||||
expect(versions).toBeDefined();
|
||||
let match = await tc.findFromManifest(versionSpec, true, versions);
|
||||
const match = await tc.findFromManifest(versionSpec, true, versions);
|
||||
expect(match).toBeDefined();
|
||||
expect(match?.version).toBe(expectedVersion);
|
||||
expect((match as any).lts).toBe(expectedLts);
|
||||
@ -177,7 +177,7 @@ describe('setup-node', () => {
|
||||
inputs['node-version'] = '12';
|
||||
inputs.stable = 'true';
|
||||
|
||||
let toolPath = path.normalize('/cache/node/12.16.1/x64');
|
||||
const toolPath = path.normalize('/cache/node/12.16.1/x64');
|
||||
findSpy.mockImplementation(() => toolPath);
|
||||
await main.run();
|
||||
|
||||
@ -189,7 +189,7 @@ describe('setup-node', () => {
|
||||
|
||||
inSpy.mockImplementation(name => inputs[name]);
|
||||
|
||||
let toolPath = path.normalize('/cache/node/12.16.1/x64');
|
||||
const toolPath = path.normalize('/cache/node/12.16.1/x64');
|
||||
findSpy.mockImplementation(() => toolPath);
|
||||
await main.run();
|
||||
|
||||
@ -201,16 +201,16 @@ describe('setup-node', () => {
|
||||
|
||||
inSpy.mockImplementation(name => inputs[name]);
|
||||
|
||||
let toolPath = path.normalize('/cache/node/12.16.1/x64');
|
||||
const toolPath = path.normalize('/cache/node/12.16.1/x64');
|
||||
findSpy.mockImplementation(() => toolPath);
|
||||
await main.run();
|
||||
|
||||
let expPath = path.join(toolPath, 'bin');
|
||||
const expPath = path.join(toolPath, 'bin');
|
||||
expect(cnSpy).toHaveBeenCalledWith(`::add-path::${expPath}${osm.EOL}`);
|
||||
});
|
||||
|
||||
it('handles unhandled find error and reports error', async () => {
|
||||
let errMsg = 'unhandled error message';
|
||||
const errMsg = 'unhandled error message';
|
||||
inputs['node-version'] = '12';
|
||||
|
||||
findSpy.mockImplementation(() => {
|
||||
@ -231,27 +231,27 @@ describe('setup-node', () => {
|
||||
os.arch = 'x64';
|
||||
|
||||
// a version which is in the manifest
|
||||
let versionSpec = '12.16.2';
|
||||
let resolvedVersion = versionSpec;
|
||||
const versionSpec = '12.16.2';
|
||||
const resolvedVersion = versionSpec;
|
||||
|
||||
inputs['node-version'] = versionSpec;
|
||||
inputs['always-auth'] = false;
|
||||
inputs['token'] = 'faketoken';
|
||||
|
||||
let expectedUrl =
|
||||
const expectedUrl =
|
||||
'https://github.com/actions/node-versions/releases/download/12.16.2-20200507.95/node-12.16.2-linux-x64.tar.gz';
|
||||
|
||||
// ... but not in the local cache
|
||||
findSpy.mockImplementation(() => '');
|
||||
|
||||
dlSpy.mockImplementation(async () => '/some/temp/path');
|
||||
let toolPath = path.normalize('/cache/node/12.16.2/x64');
|
||||
const toolPath = path.normalize('/cache/node/12.16.2/x64');
|
||||
exSpy.mockImplementation(async () => '/some/other/temp/path');
|
||||
cacheSpy.mockImplementation(async () => toolPath);
|
||||
|
||||
await main.run();
|
||||
|
||||
let expPath = path.join(toolPath, 'bin');
|
||||
const expPath = path.join(toolPath, 'bin');
|
||||
|
||||
expect(getExecOutputSpy).toHaveBeenCalledWith(
|
||||
'node',
|
||||
@ -284,7 +284,7 @@ describe('setup-node', () => {
|
||||
os.arch = 'x64';
|
||||
|
||||
// a version which is not in the manifest but is in node dist
|
||||
let versionSpec = '11.15.0';
|
||||
const versionSpec = '11.15.0';
|
||||
|
||||
inputs['node-version'] = versionSpec;
|
||||
inputs['always-auth'] = false;
|
||||
@ -318,7 +318,7 @@ describe('setup-node', () => {
|
||||
os.platform = 'linux';
|
||||
os.arch = 'x64';
|
||||
|
||||
let versionSpec = '9.99.9';
|
||||
const versionSpec = '9.99.9';
|
||||
inputs['node-version'] = versionSpec;
|
||||
|
||||
findSpy.mockImplementation(() => '');
|
||||
@ -336,13 +336,13 @@ describe('setup-node', () => {
|
||||
});
|
||||
|
||||
it('reports a failed download', async () => {
|
||||
let errMsg = 'unhandled download message';
|
||||
const errMsg = 'unhandled download message';
|
||||
os.platform = 'linux';
|
||||
os.arch = 'x64';
|
||||
|
||||
// a version which is in the manifest
|
||||
let versionSpec = '12.16.2';
|
||||
let resolvedVersion = versionSpec;
|
||||
const versionSpec = '12.16.2';
|
||||
const resolvedVersion = versionSpec;
|
||||
|
||||
inputs['node-version'] = versionSpec;
|
||||
inputs['always-auth'] = false;
|
||||
@ -376,7 +376,7 @@ describe('setup-node', () => {
|
||||
inputs['always-auth'] = false;
|
||||
inputs['token'] = 'faketoken';
|
||||
|
||||
let expectedUrl =
|
||||
const expectedUrl =
|
||||
arch === 'x64'
|
||||
? `https://github.com/actions/node-versions/releases/download/${version}/node-${version}-${platform}-${arch}.zip`
|
||||
: `https://nodejs.org/dist/v${version}/node-v${version}-${platform}-${arch}.${fileExtension}`;
|
||||
@ -385,7 +385,7 @@ describe('setup-node', () => {
|
||||
findSpy.mockImplementation(() => '');
|
||||
|
||||
dlSpy.mockImplementation(async () => '/some/temp/path');
|
||||
let toolPath = path.normalize(`/cache/node/${version}/${arch}`);
|
||||
const toolPath = path.normalize(`/cache/node/${version}/${arch}`);
|
||||
exSpy.mockImplementation(async () => '/some/other/temp/path');
|
||||
cacheSpy.mockImplementation(async () => toolPath);
|
||||
|
||||
@ -481,7 +481,7 @@ describe('setup-node', () => {
|
||||
os.arch = 'x64';
|
||||
|
||||
// a version which is not in the manifest but is in node dist
|
||||
let versionSpec = '11';
|
||||
const versionSpec = '11';
|
||||
|
||||
inputs['node-version'] = versionSpec;
|
||||
inputs['check-latest'] = 'true';
|
||||
@ -492,13 +492,13 @@ describe('setup-node', () => {
|
||||
findSpy.mockImplementation(() => '');
|
||||
|
||||
dlSpy.mockImplementation(async () => '/some/temp/path');
|
||||
let toolPath = path.normalize('/cache/node/11.11.0/x64');
|
||||
const toolPath = path.normalize('/cache/node/11.11.0/x64');
|
||||
exSpy.mockImplementation(async () => '/some/other/temp/path');
|
||||
cacheSpy.mockImplementation(async () => toolPath);
|
||||
|
||||
await main.run();
|
||||
|
||||
let expPath = path.join(toolPath, 'bin');
|
||||
const expPath = path.join(toolPath, 'bin');
|
||||
|
||||
expect(dlSpy).toHaveBeenCalled();
|
||||
expect(exSpy).toHaveBeenCalled();
|
||||
@ -523,7 +523,7 @@ describe('setup-node', () => {
|
||||
os.arch = 'x64';
|
||||
|
||||
// a version which is not in the manifest but is in node dist
|
||||
let versionSpec = '12';
|
||||
const versionSpec = '12';
|
||||
|
||||
inputs['node-version'] = versionSpec;
|
||||
inputs['check-latest'] = 'true';
|
||||
@ -537,13 +537,13 @@ describe('setup-node', () => {
|
||||
});
|
||||
|
||||
dlSpy.mockImplementation(async () => '/some/temp/path');
|
||||
let toolPath = path.normalize('/cache/node/12.11.0/x64');
|
||||
const toolPath = path.normalize('/cache/node/12.11.0/x64');
|
||||
exSpy.mockImplementation(async () => '/some/other/temp/path');
|
||||
cacheSpy.mockImplementation(async () => toolPath);
|
||||
|
||||
await main.run();
|
||||
|
||||
let expPath = path.join(toolPath, 'bin');
|
||||
const expPath = path.join(toolPath, 'bin');
|
||||
|
||||
expect(dlSpy).toHaveBeenCalled();
|
||||
expect(exSpy).toHaveBeenCalled();
|
||||
|
59
__tests__/prepare-yarn-subprojects.sh
Executable file
@ -0,0 +1,59 @@
#!/bin/sh -e
export YARN_ENABLE_IMMUTABLE_INSTALLS=false
rm package.json
rm package-lock.json
echo "create yarn2 project in the sub2"
mkdir sub2
cd sub2
cat <<EOT >package.json
{
  "name": "subproject",
  "dependencies": {
    "random": "^3.0.6",
    "uuid": "^9.0.0"
  }
}
EOT
yarn set version 2.4.3
yarn install

echo "create yarn3 project in the sub3"
cd ..
mkdir sub3
cd sub3
cat <<EOT >package.json
{
  "name": "subproject",
  "dependencies": {
    "random": "^3.0.6",
    "uuid": "^9.0.0"
  }
}
EOT
yarn set version 3.5.1
yarn install
if [ x$1 = 'xglobal' ];then
  echo enableGlobalCache
  echo 'enableGlobalCache: true' >> .yarnrc.yml
fi

cd ..
if [ x$1 != 'xkeepcache' -a x$2 != 'xkeepcache' ]; then
  rm -rf sub2/.yarn/cache
  rm -rf sub3/.yarn/cache
fi

if [ x$1 = 'xyarn1' ];then
  echo "create yarn1 project in the root"
  cat <<EOT >package.json
{
  "name": "subproject",
  "dependencies": {
    "random": "^3.0.6",
    "uuid": "^9.0.0"
  }
}
EOT
  yarn set version 1.22.19
  yarn install
fi
@ -1,10 +1,12 @@
import tscMatcher from '../.github/tsc.json';

describe('problem matcher tests', () => {
it('tsc: matches TypeScript "pretty" error message', () => {
const [
{
pattern: [{regexp}]
}
] = require('../.github/tsc.json').problemMatcher;
] = tscMatcher.problemMatcher;
const exampleErrorMessage =
"lib/index.js:23:42 - error TS2345: Argument of type 'A' is not assignable to parameter of type 'B'.";

@ -25,7 +27,7 @@ describe('problem matcher tests', () => {
{
pattern: [{regexp}]
}
] = require('../.github/tsc.json').problemMatcher;
] = tscMatcher.problemMatcher;
const exampleErrorMessage =
"lib/index.js(23,42): error TS2345: Argument of type 'A' is not assignable to parameter of type 'B'.";

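The regexp under test ships in `.github/tsc.json` and is not reproduced in this diff. As a minimal sketch only, a hypothetical stand-in pattern of the same shape shows which capture groups (file, line, column, error code, message) a matcher has to extract from both the "pretty" and the classic `file(line,col)` formats exercised above:

```ts
// Hypothetical stand-in pattern, NOT the regexp from .github/tsc.json.
const illustrativeTscPattern =
  /^(.+?)[:(](\d+)[:,](\d+)\)?\s*[-:]?\s*error\s+(TS\d+):\s*(.+)$/;

const samples = [
  "lib/index.js:23:42 - error TS2345: Argument of type 'A' is not assignable to parameter of type 'B'.",
  "lib/index.js(23,42): error TS2345: Argument of type 'A' is not assignable to parameter of type 'B'."
];

for (const sample of samples) {
  const match = sample.match(illustrativeTscPattern);
  if (match) {
    const [, file, line, column, code, message] = match;
    // Both formats yield: file 'lib/index.js', line '23', column '42', code 'TS2345'.
    console.log({file, line, column, code, message});
  }
}
```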
@ -12,10 +12,10 @@ import * as main from '../src/main';
import * as auth from '../src/authutil';
import {INodeVersion} from '../src/distributions/base-models';

const nodeTestDist = require('./data/node-dist-index.json');
const nodeTestDistNightly = require('./data/node-nightly-index.json');
const nodeTestDistRc = require('./data/node-rc-index.json');
const nodeV8CanaryTestDist = require('./data/v8-canary-dist-index.json');
import nodeTestDist from './data/node-dist-index.json';
import nodeTestDistNightly from './data/node-nightly-index.json';
import nodeTestDistRc from './data/node-rc-index.json';
import nodeV8CanaryTestDist from './data/v8-canary-dist-index.json';

describe('setup-node', () => {
let inputs = {} as any;
@ -86,11 +86,11 @@ describe('setup-node', () => {
getJsonSpy.mockImplementation(url => {
let res: any;
if (url.includes('/rc')) {
res = <INodeVersion>nodeTestDistRc;
res = <INodeVersion[]>nodeTestDistRc;
} else if (url.includes('/nightly')) {
res = <INodeVersion>nodeTestDistNightly;
res = <INodeVersion[]>nodeTestDistNightly;
} else {
res = <INodeVersion>nodeTestDist;
res = <INodeVersion[]>nodeTestDist;
}

return {result: res};
@ -142,7 +142,7 @@ describe('setup-node', () => {
inputs['node-version'] = '12.0.0-rc.1';
inputs.stable = 'true';

let toolPath = path.normalize('/cache/node/12.0.0-rc.1/x64');
const toolPath = path.normalize('/cache/node/12.0.0-rc.1/x64');
findSpy.mockImplementation(() => toolPath);
await main.run();

@ -154,7 +154,7 @@ describe('setup-node', () => {

inSpy.mockImplementation(name => inputs[name]);

let toolPath = path.normalize('/cache/node/12.0.0-rc.1/x64');
const toolPath = path.normalize('/cache/node/12.0.0-rc.1/x64');
findSpy.mockImplementation(() => toolPath);
await main.run();

@ -166,16 +166,16 @@ describe('setup-node', () => {

inSpy.mockImplementation(name => inputs[name]);

let toolPath = path.normalize('/cache/node/12.0.0-rc.1/x64');
const toolPath = path.normalize('/cache/node/12.0.0-rc.1/x64');
findSpy.mockImplementation(() => toolPath);
await main.run();

let expPath = path.join(toolPath, 'bin');
const expPath = path.join(toolPath, 'bin');
expect(cnSpy).toHaveBeenCalledWith(`::add-path::${expPath}${osm.EOL}`);
});

it('handles unhandled find error and reports error', async () => {
let errMsg = 'unhandled error message';
const errMsg = 'unhandled error message';
inputs['node-version'] = '12.0.0-rc.1';

findSpy.mockImplementation(() => {
@ -191,7 +191,7 @@ describe('setup-node', () => {
os.platform = 'linux';
os.arch = 'x64';

let versionSpec = '13.0.0-rc.0';
const versionSpec = '13.0.0-rc.0';

inputs['node-version'] = versionSpec;
inputs['always-auth'] = false;
@ -201,13 +201,13 @@ describe('setup-node', () => {
findSpy.mockImplementation(() => '');

dlSpy.mockImplementation(async () => '/some/temp/path');
let toolPath = path.normalize('/cache/node/13.0.0-rc.0/x64');
const toolPath = path.normalize('/cache/node/13.0.0-rc.0/x64');
exSpy.mockImplementation(async () => '/some/other/temp/path');
cacheSpy.mockImplementation(async () => toolPath);

await main.run();

let expPath = path.join(toolPath, 'bin');
const expPath = path.join(toolPath, 'bin');

expect(dlSpy).toHaveBeenCalled();
expect(exSpy).toHaveBeenCalled();
@ -220,7 +220,7 @@ describe('setup-node', () => {
os.platform = 'linux';
os.arch = 'x64';

let versionSpec = '9.99.9-rc.1';
const versionSpec = '9.99.9-rc.1';
inputs['node-version'] = versionSpec;

findSpy.mockImplementation(() => '');
@ -232,11 +232,11 @@ describe('setup-node', () => {
});

it('reports a failed download', async () => {
let errMsg = 'unhandled download message';
const errMsg = 'unhandled download message';
os.platform = 'linux';
os.arch = 'x64';

let versionSpec = '14.7.0-rc.1';
const versionSpec = '14.7.0-rc.1';

inputs['node-version'] = versionSpec;
inputs['always-auth'] = false;
@ -271,14 +271,14 @@ describe('setup-node', () => {
inputs['always-auth'] = false;
inputs['token'] = 'faketoken';

let expectedUrl = `https://nodejs.org/download/rc/v${version}/node-v${version}-${platform}-${arch}.${fileExtension}`;
const expectedUrl = `https://nodejs.org/download/rc/v${version}/node-v${version}-${platform}-${arch}.${fileExtension}`;

// ... but not in the local cache
findSpy.mockImplementation(() => '');
findAllVersionsSpy.mockImplementation(() => []);

dlSpy.mockImplementation(async () => '/some/temp/path');
let toolPath = path.normalize(`/cache/node/${version}/${arch}`);
const toolPath = path.normalize(`/cache/node/${version}/${arch}`);
exSpy.mockImplementation(async () => '/some/other/temp/path');
cacheSpy.mockImplementation(async () => toolPath);

@ -25,7 +25,7 @@ inputs:
description: 'Used to specify a package manager for caching in the default directory. Supported values: npm, yarn, pnpm.'
cache-dependency-path:
description: 'Used to specify the path to a dependency file: package-lock.json, yarn.lock, etc. Supports wildcards or a list of file names for caching multiple dependencies.'
# TODO: add input to control forcing to pull from cloud or dist.
# TODO: add input to control forcing to pull from cloud or dist.
# escape valve for someone having issues or needing the absolute latest which isn't cached yet
outputs:
cache-hit:
6731
dist/cache-save/index.js
vendored
File diff suppressed because one or more lines are too long
5480
dist/setup/index.js
vendored
File diff suppressed because one or more lines are too long
@ -24,7 +24,7 @@ To ensure that `yarn.lock` is always committed, use `yarn install --immutable` w
**See also:**
- [Documentation of `yarn.lock`](https://classic.yarnpkg.com/en/docs/yarn-lock)
- [Documentation of `--frozen-lockfile` option](https://classic.yarnpkg.com/en/docs/cli/install#toc-yarn-install-frozen-lockfile)
- [QA - Should lockfiles be committed to the repoistory?](https://yarnpkg.com/getting-started/qa/#should-lockfiles-be-committed-to-the-repository)
- [QA - Should lockfiles be committed to the repository?](https://yarnpkg.com/getting-started/qa/#should-lockfiles-be-committed-to-the-repository)
- [Documentation of `yarn install`](https://yarnpkg.com/cli/install)

### PNPM
@ -261,10 +261,14 @@ steps:
with:
node-version: '14'
cache: 'pnpm'
- run: pnpm install --frozen-lockfile
- run: pnpm install
- run: pnpm test
```

> **Note**: By default `--frozen-lockfile` option is passed starting from pnpm `6.10.x`. It will be automatically added if you run it on [CI](https://pnpm.io/cli/install#--frozen-lockfile).
> If the `pnpm-lock.yaml` file changes then pass `--frozen-lockfile` option.


**Using wildcard patterns to cache dependencies**
```yaml
steps:
@ -401,11 +405,14 @@ steps:
yarn config set npmScopes.my-org.npmAlwaysAuth true
yarn config set npmScopes.my-org.npmAuthToken $NPM_AUTH_TOKEN
env:
NPM_AUTH_TOKEN: ${{ secrets.YARN_TOKEN }}
NPM_AUTH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
- name: Install dependencies
run: yarn install --immutable
```
NOTE: As per https://github.com/actions/setup-node/issues/49 you cannot use `secrets.GITHUB_TOKEN` to access private GitHub Packages within the same organisation but in a different repository.

To access private GitHub Packages within the same organization, go to "Manage Actions access" in Package settings and set the repositories you want to access.

Please refer to the [Ensuring workflow access to your package - Configuring a package's access control and visibility](https://docs.github.com/en/packages/learn-github-packages/configuring-a-packages-access-control-and-visibility#ensuring-workflow-access-to-your-package) for more details.

### always-auth input
The always-auth input sets `always-auth=true` in .npmrc file. With this option set [npm](https://docs.npmjs.com/cli/v6/using-npm/config#always-auth)/yarn sends the authentication credentials when making a request to the registries.
@ -52,7 +52,7 @@ Pull requests are the easiest way to contribute changes to git repos at GitHub.

- Please check that no one else has already created a pull request with these or similar changes
- Use a "feature branch" for your changes. That separates the changes in the pull request from your other changes and makes it easy to edit/amend commits in the pull request
- **Run `pre-checkin` script to format, build and test changes**
- **Run `pre-checkin` script to format, lint, build and test changes**
- Make sure your changes are well formatted and that all tests are passing
- If your pull request is connected to an open issue, please, leave a link to this issue in the `Related issue:` section
- If you later need to add new commits to the pull request, you can simply commit the changes to the local branch and then push them. The pull request gets automatically updated
@ -61,6 +61,7 @@ Pull requests are the easiest way to contribute changes to git repos at GitHub.

- To implement new features or fix bugs, you need to make changes to the `.ts` files, which are located in the `src` folder
- To comply with the code style, **you need to run the `format` script**
- To lint the code, **you need to run the `lint:fix` script**
- To transpile source code to `javascript` we use [NCC](https://github.com/vercel/ncc). **It is very important to run the `build` script after making changes**, otherwise your changes will not get into the final `javascript` build
- You can also start formatting, building code, and testing with a single `pre-checkin` command

2734
package-lock.json
generated
File diff suppressed because it is too large
20
package.json
@ -6,11 +6,12 @@
"main": "lib/setup-node.js",
"scripts": {
"build": "ncc build -o dist/setup src/setup-node.ts && ncc build -o dist/cache-save src/cache-save.ts",
"format": "prettier --write **/*.ts",
"format-check": "prettier --check **/*.ts",
"lint": "echo \"Fake command that does nothing. It is used in reusable workflows\"",
"format": "prettier --no-error-on-unmatched-pattern --config ./.prettierrc.js --write \"**/*.{ts,yml,yaml}\"",
"format-check": "prettier --no-error-on-unmatched-pattern --config ./.prettierrc.js --check \"**/*.{ts,yml,yaml}\"",
"lint": "eslint --config ./.eslintrc.js \"**/*.ts\"",
"lint:fix": "eslint --config ./.eslintrc.js \"**/*.ts\" --fix",
"test": "jest --coverage",
"pre-checkin": "npm run format && npm run build && npm test"
"pre-checkin": "npm run format && npm run lint:fix && npm run build && npm test"
},
"repository": {
"type": "git",
@ -32,16 +33,23 @@
"@actions/http-client": "^1.0.11",
"@actions/io": "^1.0.2",
"@actions/tool-cache": "^1.5.4",
"semver": "^6.1.1"
"semver": "^6.3.1"
},
"devDependencies": {
"@types/jest": "^27.0.2",
"@types/node": "^16.11.25",
"@types/semver": "^6.0.0",
"@typescript-eslint/eslint-plugin": "^5.54.0",
"@typescript-eslint/parser": "^5.54.0",
"@vercel/ncc": "^0.33.4",
"eslint": "^8.35.0",
"eslint-config-prettier": "^8.6.0",
"eslint-plugin-jest": "^27.2.1",
"eslint-plugin-node": "^11.1.0",
"jest": "^27.2.5",
"jest-circus": "^27.2.5",
"prettier": "^1.19.1",
"jest-each": "^27.2.5",
"prettier": "^2.8.4",
"ts-jest": "^27.0.5",
"typescript": "^4.2.3"
}
@ -33,7 +33,7 @@ function writeRegistryToFile(
}

core.debug(`Setting auth in ${fileLocation}`);
let newContents: string = '';
let newContents = '';
if (fs.existsSync(fileLocation)) {
const curContents: string = fs.readFileSync(fileLocation, 'utf8');
curContents.split(os.EOL).forEach((line: string) => {
@ -46,8 +46,8 @@ function writeRegistryToFile(
// Remove http: or https: from front of registry.
const authString: string =
registryUrl.replace(/(^\w+:|^)/, '') + ':_authToken=${NODE_AUTH_TOKEN}';
const registryString: string = `${scope}registry=${registryUrl}`;
const alwaysAuthString: string = `always-auth=${alwaysAuth}`;
const registryString = `${scope}registry=${registryUrl}`;
const alwaysAuthString = `always-auth=${alwaysAuth}`;
newContents += `${authString}${os.EOL}${registryString}${os.EOL}${alwaysAuthString}`;
fs.writeFileSync(fileLocation, newContents);
core.exportVariable('NPM_CONFIG_USERCONFIG', fileLocation);
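For orientation, a small self-contained sketch of the three strings assembled above, using illustrative scope and registry values (the `${NODE_AUTH_TOKEN}` placeholder is kept literal on purpose so npm expands it at install time):

```ts
// Illustrative values only; in authutil.ts these come from the action inputs.
const registryUrl = 'https://npm.pkg.github.com/';
const scope = '@my-org:';
const alwaysAuth = 'false';

const authString =
  registryUrl.replace(/(^\w+:|^)/, '') + ':_authToken=${NODE_AUTH_TOKEN}';
const registryString = `${scope}registry=${registryUrl}`;
const alwaysAuthString = `always-auth=${alwaysAuth}`;

console.log([authString, registryString, alwaysAuthString].join('\n'));
// //npm.pkg.github.com/:_authToken=${NODE_AUTH_TOKEN}
// @my-org:registry=https://npm.pkg.github.com/
// always-auth=false
```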
@ -6,14 +6,15 @@ import fs from 'fs';

import {State} from './constants';
import {
getCacheDirectoryPath,
getCacheDirectories,
getPackageManagerInfo,
repoHasYarnBerryManagedDependencies,
PackageManagerInfo
} from './cache-utils';

export const restoreCache = async (
packageManager: string,
cacheDependencyPath?: string
cacheDependencyPath: string
) => {
const packageManagerInfo = await getPackageManagerInfo(packageManager);
if (!packageManagerInfo) {
@ -21,10 +22,11 @@ export const restoreCache = async (
}
const platform = process.env.RUNNER_OS;

const cachePath = await getCacheDirectoryPath(
const cachePaths = await getCacheDirectories(
packageManagerInfo,
packageManager
cacheDependencyPath
);
core.saveState(State.CachePaths, cachePaths);
const lockFilePath = cacheDependencyPath
? cacheDependencyPath
: findLockFile(packageManagerInfo);
@ -36,12 +38,26 @@ export const restoreCache = async (
);
}

const primaryKey = `node-cache-${platform}-${packageManager}-${fileHash}`;
const keyPrefix = `node-cache-${platform}-${packageManager}`;
const primaryKey = `${keyPrefix}-${fileHash}`;
core.debug(`primary key is ${primaryKey}`);

core.saveState(State.CachePrimaryKey, primaryKey);

const cacheKey = await cache.restoreCache([cachePath], primaryKey);
const isManagedByYarnBerry = await repoHasYarnBerryManagedDependencies(
packageManagerInfo,
cacheDependencyPath
);
let cacheKey: string | undefined;
if (isManagedByYarnBerry) {
core.info(
'All dependencies are managed locally by yarn3, the previous cache can be used'
);
cacheKey = await cache.restoreCache(cachePaths, primaryKey, [keyPrefix]);
} else {
cacheKey = await cache.restoreCache(cachePaths, primaryKey);
}

core.setOutput('cache-hit', Boolean(cacheKey));

if (!cacheKey) {
@ -54,8 +70,9 @@ export const restoreCache = async (
};

const findLockFile = (packageManager: PackageManagerInfo) => {
let lockFiles = packageManager.lockFilePatterns;
const lockFiles = packageManager.lockFilePatterns;
const workspace = process.env.GITHUB_WORKSPACE!;

const rootContent = fs.readdirSync(workspace);

const lockFile = lockFiles.find(item => rootContent.includes(item));
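To make the key scheme introduced above concrete, here is a sketch with illustrative values showing what `keyPrefix` and `primaryKey` evaluate to, and when the extra restore-keys fallback is used:

```ts
// Illustrative values; at runtime they come from RUNNER_OS, the cache input
// and the hash of the matched lock file(s).
const platform = 'Linux';
const packageManager = 'yarn';
const fileHash = '4f2d9c';

const keyPrefix = `node-cache-${platform}-${packageManager}`; // node-cache-Linux-yarn
const primaryKey = `${keyPrefix}-${fileHash}`;                // node-cache-Linux-yarn-4f2d9c

// yarn-berry projects with a locally managed cache tolerate a stale entry and
// let yarn reconcile it, so the prefix is passed as a restore key:
//   cache.restoreCache(cachePaths, primaryKey, [keyPrefix]);
// every other configuration keeps exact-match restores:
//   cache.restoreCache(cachePaths, primaryKey);
console.log({keyPrefix, primaryKey});
```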
@ -1,8 +1,10 @@
import * as core from '@actions/core';
import * as cache from '@actions/cache';

import fs from 'fs';

import {State} from './constants';
import {getCacheDirectoryPath, getPackageManagerInfo} from './cache-utils';
import {getPackageManagerInfo} from './cache-utils';

// Catch and log any unhandled exceptions. These exceptions can leak out of the uploadChunk method in
// @actions/toolkit when a failed upload closes the file descriptor causing any in-process reads to
@ -14,7 +16,7 @@ process.on('uncaughtException', e => {

export async function run() {
try {
const cacheLock = core.getInput('cache');
const cacheLock = core.getState(State.CachePackageManager);
await cachePackages(cacheLock);
} catch (error) {
core.setFailed(error.message);
@ -24,6 +26,10 @@ export async function run() {
const cachePackages = async (packageManager: string) => {
const state = core.getState(State.CacheMatchedKey);
const primaryKey = core.getState(State.CachePrimaryKey);
let cachePaths = JSON.parse(
core.getState(State.CachePaths) || '[]'
) as string[];
cachePaths = cachePaths.filter(fs.existsSync);

const packageManagerInfo = await getPackageManagerInfo(packageManager);
if (!packageManagerInfo) {
@ -31,14 +37,12 @@ const cachePackages = async (packageManager: string) => {
return;
}

const cachePath = await getCacheDirectoryPath(
packageManagerInfo,
packageManager
);

if (!fs.existsSync(cachePath)) {
if (!cachePaths.length) {
// TODO: core.getInput has a bug - it can return undefined despite its definition (tests only?)
// export declare function getInput(name: string, options?: InputOptions): string;
const cacheDependencyPath = core.getInput('cache-dependency-path') || '';
throw new Error(
`Cache folder path is retrieved for ${packageManager} but doesn't exist on disk: ${cachePath}`
`Cache folder paths are not retrieved for ${packageManager} with cache-dependency-path = ${cacheDependencyPath}`
);
}

@ -49,7 +53,7 @@ const cachePackages = async (packageManager: string) => {
return;
}

const cacheId = await cache.saveCache([cachePath], primaryKey);
const cacheId = await cache.saveCache(cachePaths, primaryKey);
if (cacheId == -1) {
return;
}
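A minimal sketch of the state hand-off this file now relies on (the folder value is made up; the `State` keys are the ones defined in src/constants.ts):

```ts
import * as core from '@actions/core';
import fs from 'fs';
import {State} from './constants';

// cache-restore.ts stores the resolved cache folders as workflow state;
// core.saveState serializes non-string values to JSON.
core.saveState(State.CachePaths, ['/home/runner/.yarn/berry/cache']);

// cache-save.ts reads the same state back, defaults to an empty list and keeps
// only folders that actually exist on disk before calling cache.saveCache.
const cachePaths = (
  JSON.parse(core.getState(State.CachePaths) || '[]') as string[]
).filter(fs.existsSync);
console.log(cachePaths);
```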
@ -1,40 +1,79 @@
import * as core from '@actions/core';
import * as exec from '@actions/exec';
import * as cache from '@actions/cache';

type SupportedPackageManagers = {
[prop: string]: PackageManagerInfo;
};
import * as glob from '@actions/glob';
import path from 'path';
import fs from 'fs';
import {unique} from './util';

export interface PackageManagerInfo {
name: string;
lockFilePatterns: Array<string>;
getCacheFolderCommand: string;
getCacheFolderPath: (projectDir?: string) => Promise<string>;
}

interface SupportedPackageManagers {
npm: PackageManagerInfo;
pnpm: PackageManagerInfo;
yarn: PackageManagerInfo;
}
export const supportedPackageManagers: SupportedPackageManagers = {
npm: {
name: 'npm',
lockFilePatterns: ['package-lock.json', 'npm-shrinkwrap.json', 'yarn.lock'],
getCacheFolderCommand: 'npm config get cache'
getCacheFolderPath: () =>
getCommandOutputNotEmpty(
'npm config get cache',
'Could not get npm cache folder path'
)
},
pnpm: {
name: 'pnpm',
lockFilePatterns: ['pnpm-lock.yaml'],
getCacheFolderCommand: 'pnpm store path --silent'
getCacheFolderPath: () =>
getCommandOutputNotEmpty(
'pnpm store path --silent',
'Could not get pnpm cache folder path'
)
},
yarn1: {
yarn: {
name: 'yarn',
lockFilePatterns: ['yarn.lock'],
getCacheFolderCommand: 'yarn cache dir'
},
yarn2: {
lockFilePatterns: ['yarn.lock'],
getCacheFolderCommand: 'yarn config get cacheFolder'
getCacheFolderPath: async projectDir => {
const yarnVersion = await getCommandOutputNotEmpty(
`yarn --version`,
'Could not retrieve version of yarn',
projectDir
);

core.debug(
`Consumed yarn version is ${yarnVersion} (working dir: "${
projectDir || ''
}")`
);

const stdOut = yarnVersion.startsWith('1.')
? await getCommandOutput('yarn cache dir', projectDir)
: await getCommandOutput('yarn config get cacheFolder', projectDir);

if (!stdOut) {
throw new Error(
`Could not get yarn cache folder path for ${projectDir}`
);
}
return stdOut;
}
}
};

export const getCommandOutput = async (toolCommand: string) => {
export const getCommandOutput = async (
toolCommand: string,
cwd?: string
): Promise<string> => {
let {stdout, stderr, exitCode} = await exec.getExecOutput(
toolCommand,
undefined,
{ignoreReturnCode: true}
{ignoreReturnCode: true, ...(cwd && {cwd})}
);

if (exitCode) {
@ -47,16 +86,15 @@ export const getCommandOutput = async (toolCommand: string) => {
return stdout.trim();
};

const getPackageManagerVersion = async (
packageManager: string,
command: string
) => {
const stdOut = await getCommandOutput(`${packageManager} ${command}`);

export const getCommandOutputNotEmpty = async (
toolCommand: string,
error: string,
cwd?: string
): Promise<string> => {
const stdOut = getCommandOutput(toolCommand, cwd);
if (!stdOut) {
throw new Error(`Could not retrieve version of ${packageManager}`);
throw new Error(error);
}

return stdOut;
};

@ -66,35 +104,191 @@ export const getPackageManagerInfo = async (packageManager: string) => {
} else if (packageManager === 'pnpm') {
return supportedPackageManagers.pnpm;
} else if (packageManager === 'yarn') {
const yarnVersion = await getPackageManagerVersion('yarn', '--version');

core.debug(`Consumed yarn version is ${yarnVersion}`);

if (yarnVersion.startsWith('1.')) {
return supportedPackageManagers.yarn1;
} else {
return supportedPackageManagers.yarn2;
}
return supportedPackageManagers.yarn;
} else {
return null;
}
};

export const getCacheDirectoryPath = async (
packageManagerInfo: PackageManagerInfo,
packageManager: string
) => {
const stdOut = await getCommandOutput(
packageManagerInfo.getCacheFolderCommand
);
/**
* getProjectDirectoriesFromCacheDependencyPath is called twice during `restoreCache`
* - first through `getCacheDirectories`
* - second from `repoHasYarn3ManagedCache`
*
* it contains expensive IO operation and thus should be memoized
*/

if (!stdOut) {
throw new Error(`Could not get cache folder path for ${packageManager}`);
let projectDirectoriesMemoized: string[] | null = null;
/**
* unit test must reset memoized variables
*/
export const resetProjectDirectoriesMemoized = () =>
(projectDirectoriesMemoized = null);
/**
* Expands (converts) the string input `cache-dependency-path` to list of directories that
* may be project roots
* @param cacheDependencyPath - either a single string or multiline string with possible glob patterns
* expected to be the result of `core.getInput('cache-dependency-path')`
* @return list of directories and possible
*/
const getProjectDirectoriesFromCacheDependencyPath = async (
cacheDependencyPath: string
): Promise<string[]> => {
if (projectDirectoriesMemoized !== null) {
return projectDirectoriesMemoized;
}

core.debug(`${packageManager} path is ${stdOut}`);
const globber = await glob.create(cacheDependencyPath);
const cacheDependenciesPaths = await globber.glob();

return stdOut.trim();
const existingDirectories: string[] = cacheDependenciesPaths
.map(path.dirname)
.filter(unique())
.map(dirName => fs.realpathSync(dirName))
.filter(directory => fs.lstatSync(directory).isDirectory());

if (!existingDirectories.length)
core.warning(
`No existing directories found containing cache-dependency-path="${cacheDependencyPath}"`
);

projectDirectoriesMemoized = existingDirectories;
return existingDirectories;
};

/**
* Finds the cache directories configured for the repo if cache-dependency-path is not empty
* @param packageManagerInfo - an object having getCacheFolderPath method specific to given PM
* @param cacheDependencyPath - either a single string or multiline string with possible glob patterns
* expected to be the result of `core.getInput('cache-dependency-path')`
* @return list of files on which the cache depends
*/
const getCacheDirectoriesFromCacheDependencyPath = async (
packageManagerInfo: PackageManagerInfo,
cacheDependencyPath: string
): Promise<string[]> => {
const projectDirectories = await getProjectDirectoriesFromCacheDependencyPath(
cacheDependencyPath
);
const cacheFoldersPaths = await Promise.all(
projectDirectories.map(async projectDirectory => {
const cacheFolderPath = await packageManagerInfo.getCacheFolderPath(
projectDirectory
);
core.debug(
`${packageManagerInfo.name}'s cache folder "${cacheFolderPath}" configured for the directory "${projectDirectory}"`
);
return cacheFolderPath;
})
);
// uniq in order to do not cache the same directories twice
return cacheFoldersPaths.filter(unique());
};

/**
* Finds the cache directories configured for the repo ignoring cache-dependency-path
* @param packageManagerInfo - an object having getCacheFolderPath method specific to given PM
* @return list of files on which the cache depends
*/
const getCacheDirectoriesForRootProject = async (
packageManagerInfo: PackageManagerInfo
): Promise<string[]> => {
const cacheFolderPath = await packageManagerInfo.getCacheFolderPath();
core.debug(
`${packageManagerInfo.name}'s cache folder "${cacheFolderPath}" configured for the root directory`
);
return [cacheFolderPath];
};

/**
* A function to find the cache directories configured for the repo
* currently it handles only the case of PM=yarn && cacheDependencyPath is not empty
* @param packageManagerInfo - an object having getCacheFolderPath method specific to given PM
* @param cacheDependencyPath - either a single string or multiline string with possible glob patterns
* expected to be the result of `core.getInput('cache-dependency-path')`
* @return list of files on which the cache depends
*/
export const getCacheDirectories = async (
packageManagerInfo: PackageManagerInfo,
cacheDependencyPath: string
): Promise<string[]> => {
// For yarn, if cacheDependencyPath is set, ask information about cache folders in each project
// folder satisfied by cacheDependencyPath https://github.com/actions/setup-node/issues/488
if (packageManagerInfo.name === 'yarn' && cacheDependencyPath) {
return getCacheDirectoriesFromCacheDependencyPath(
packageManagerInfo,
cacheDependencyPath
);
}
return getCacheDirectoriesForRootProject(packageManagerInfo);
};

/**
* A function to check if the directory is a yarn project configured to manage
* obsolete dependencies in the local cache
* @param directory - a path to the folder
* @return - true if the directory's project is yarn managed
* - if there's .yarn/cache folder do not mess with the dependencies kept in the repo, return false
* - global cache is not managed by yarn @see https://yarnpkg.com/features/offline-cache, return false
* - if local cache is not explicitly enabled (not yarn3), return false
* - return true otherwise
*/
const projectHasYarnBerryManagedDependencies = async (
directory: string
): Promise<boolean> => {
const workDir = directory || process.env.GITHUB_WORKSPACE || '.';
core.debug(`check if "${workDir}" has locally managed yarn3 dependencies`);

// if .yarn/cache directory exists the cache is managed by version control system
const yarnCacheFile = path.join(workDir, '.yarn', 'cache');
if (
fs.existsSync(yarnCacheFile) &&
fs.lstatSync(yarnCacheFile).isDirectory()
) {
core.debug(
`"${workDir}" has .yarn/cache - dependencies are kept in the repository`
);
return Promise.resolve(false);
}

// NOTE: yarn1 returns 'undefined' with return code = 0
const enableGlobalCache = await getCommandOutput(
'yarn config get enableGlobalCache',
workDir
);
// only local cache is not managed by yarn
const managed = enableGlobalCache.includes('false');
if (managed) {
core.debug(`"${workDir}" dependencies are managed by yarn 3 locally`);
return true;
} else {
core.debug(`"${workDir}" dependencies are not managed by yarn 3 locally`);
return false;
}
};

/**
* A function to report the repo contains Yarn managed projects
* @param packageManagerInfo - used to make sure current package manager is yarn
* @param cacheDependencyPath - either a single string or multiline string with possible glob patterns
* expected to be the result of `core.getInput('cache-dependency-path')`
* @return - true if all project directories configured to be Yarn managed
*/
export const repoHasYarnBerryManagedDependencies = async (
packageManagerInfo: PackageManagerInfo,
cacheDependencyPath: string
): Promise<boolean> => {
if (packageManagerInfo.name !== 'yarn') return false;

const yarnDirs = cacheDependencyPath
? await getProjectDirectoriesFromCacheDependencyPath(cacheDependencyPath)
: [''];

const isManagedList = await Promise.all(
yarnDirs.map(projectHasYarnBerryManagedDependencies)
);

return isManagedList.every(Boolean);
};

export function isGhes(): boolean {
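As a usage sketch (the multiline input and the paths are hypothetical, mirroring the sub2/sub3 projects created by __tests__/prepare-yarn-subprojects.sh), the exported helpers above can be combined like this:

```ts
import {getPackageManagerInfo, getCacheDirectories} from './cache-utils';

// What core.getInput('cache-dependency-path') would return for a multiline input.
const cacheDependencyPath = 'sub2/yarn.lock\nsub3/yarn.lock';

async function resolveYarnCacheDirs(): Promise<string[]> {
  const info = await getPackageManagerInfo('yarn');
  if (!info) return [];
  // For yarn with a non-empty cache-dependency-path each matched lock file is
  // resolved to its project directory, that directory is asked for its cache
  // folder, and duplicates are dropped.
  return getCacheDirectories(info, cacheDependencyPath);
}

resolveYarnCacheDirs().then(dirs => console.log(dirs));
// e.g. ['/repo/sub2/.yarn/cache', '/repo/sub3/.yarn/cache']
```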
@ -5,8 +5,10 @@ export enum LockType {
}

export enum State {
CachePackageManager = 'SETUP_NODE_CACHE_PACKAGE_MANAGER',
CachePrimaryKey = 'CACHE_KEY',
CacheMatchedKey = 'CACHE_RESULT'
CacheMatchedKey = 'CACHE_RESULT',
CachePaths = 'CACHE_PATHS'
}

export enum Outputs {
@ -70,7 +70,7 @@ export default abstract class BaseDistribution {

core.debug(`evaluating ${versions.length} versions`);

for (let potential of versions) {
for (const potential of versions) {
const satisfied: boolean = semver.satisfies(potential, range, options);
if (satisfied) {
version = potential;
@ -88,25 +88,29 @@ export default abstract class BaseDistribution {
}

protected findVersionInHostedToolCacheDirectory() {
return tc.find('node', this.nodeInfo.versionSpec, this.nodeInfo.arch);
return tc.find(
'node',
this.nodeInfo.versionSpec,
this.translateArchToDistUrl(this.nodeInfo.arch)
);
}

protected async getNodeJsVersions(): Promise<INodeVersion[]> {
const initialUrl = this.getDistributionUrl();
const dataUrl = `${initialUrl}/index.json`;

let response = await this.httpClient.getJson<INodeVersion[]>(dataUrl);
const response = await this.httpClient.getJson<INodeVersion[]>(dataUrl);
return response.result || [];
}

protected getNodejsDistInfo(version: string) {
let osArch: string = this.translateArchToDistUrl(this.nodeInfo.arch);
const osArch: string = this.translateArchToDistUrl(this.nodeInfo.arch);
version = semver.clean(version) || '';
let fileName: string =
const fileName: string =
this.osPlat == 'win32'
? `node-v${version}-win-${osArch}`
: `node-v${version}-${this.osPlat}-${osArch}`;
let urlFileName: string =
const urlFileName: string =
this.osPlat == 'win32' ? `${fileName}.7z` : `${fileName}.tar.gz`;
const initialUrl = this.getDistributionUrl();
const url = `${initialUrl}/v${version}/${urlFileName}`;
@ -127,8 +131,12 @@ export default abstract class BaseDistribution {
try {
downloadPath = await tc.downloadTool(info.downloadUrl);
} catch (err) {
if (err instanceof tc.HTTPError && err.httpStatusCode == 404) {
return await this.acquireNodeFromFallbackLocation(
if (
err instanceof tc.HTTPError &&
err.httpStatusCode == 404 &&
this.osPlat == 'win32'
) {
return await this.acquireWindowsNodeFromFallbackLocation(
info.resolvedVersion,
info.arch
);
@ -137,7 +145,7 @@ export default abstract class BaseDistribution {
throw err;
}

let toolPath = await this.extractArchive(downloadPath, info);
const toolPath = await this.extractArchive(downloadPath, info);
core.info('Done');

return toolPath;
@ -151,12 +159,12 @@ export default abstract class BaseDistribution {
return {range: valid, options};
}

protected async acquireNodeFromFallbackLocation(
protected async acquireWindowsNodeFromFallbackLocation(
version: string,
arch: string = os.arch()
): Promise<string> {
const initialUrl = this.getDistributionUrl();
let osArch: string = this.translateArchToDistUrl(arch);
const osArch: string = this.translateArchToDistUrl(arch);

// Create temporary folder to download in to
const tempDownloadFolder: string =
@ -240,7 +248,7 @@ export default abstract class BaseDistribution {
}

protected getDistFileName(): string {
let osArch: string = this.translateArchToDistUrl(this.nodeInfo.arch);
const osArch: string = this.translateArchToDistUrl(this.nodeInfo.arch);

// node offers a json list of versions
let dataFileName: string;
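For reference, the archive names and URLs that `getNodejsDistInfo` builds take the following shape (values illustrative; the host depends on the concrete distribution's `getDistributionUrl()`):

```ts
// Illustrative only: shapes produced for a cleaned version on two platforms.
const version = '18.17.0';
const initialUrl = 'https://nodejs.org/dist';

const linuxUrl = `${initialUrl}/v${version}/node-v${version}-linux-x64.tar.gz`;
const windowsUrl = `${initialUrl}/v${version}/node-v${version}-win-x64.7z`;

console.log(linuxUrl);   // https://nodejs.org/dist/v18.17.0/node-v18.17.0-linux-x64.tar.gz
console.log(windowsUrl); // https://nodejs.org/dist/v18.17.0/node-v18.17.0-win-x64.7z
```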
@ -9,6 +9,7 @@ import {restoreCache} from './cache-restore';
import {isCacheFeatureAvailable} from './cache-utils';
import {getNodejsDistribution} from './distributions/installer-factory';
import {parseNodeVersionFile, printEnvDetailsAndSetOutput} from './util';
import {State} from './constants';

export async function run() {
try {
@ -60,6 +61,7 @@ export async function run() {
}

if (cache && isCacheFeatureAvailable()) {
core.saveState(State.CachePackageManager, cache);
const cacheDependencyPath = core.getInput('cache-dependency-path');
await restoreCache(cache, cacheDependencyPath);
}
11
src/util.ts
@ -13,7 +13,7 @@ export function parseNodeVersionFile(contents: string): string {
}

if (!nodeVersion) {
const found = contents.match(/^(?:nodejs\s+)?v?(?<version>[^\s]+)$/m);
const found = contents.match(/^(?:node(js)?\s+)?v?(?<version>[^\s]+)$/m);
nodeVersion = found?.groups?.version;
}

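The broadened alternation accepts both a `nodejs` and a `node` prefix in front of the version (the surrounding function, not shown in full here, only falls back to this pattern when its earlier checks found nothing). A minimal sketch of what the new pattern matches:

```ts
// Same pattern as the new line above, applied to a few representative inputs.
const versionLine = /^(?:node(js)?\s+)?v?(?<version>[^\s]+)$/m;

for (const contents of ['v16.9.1', 'nodejs 16.9.1', 'node 16.9.1']) {
  console.log(contents.match(versionLine)?.groups?.version); // "16.9.1" each time
}
```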
@ -61,3 +61,12 @@ async function getToolVersion(tool: string, options: string[]) {
return '';
}
}

export const unique = () => {
const encountered = new Set();
return (value: unknown): boolean => {
if (encountered.has(value)) return false;
encountered.add(value);
return true;
};
};
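The helper is consumed in src/cache-utils.ts (see the `.filter(unique())` calls earlier in this compare). A short usage sketch with made-up paths:

```ts
import {unique} from './util';

// unique() returns a stateful predicate, so create a fresh one per filter() call.
const cacheFolders = [
  '/repo/sub2/.yarn/cache',
  '/repo/sub3/.yarn/cache',
  '/repo/sub2/.yarn/cache' // duplicate resolved from another lock file
];

console.log(cacheFolders.filter(unique()));
// ['/repo/sub2/.yarn/cache', '/repo/sub3/.yarn/cache']
```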
@ -7,7 +7,8 @@
"sourceMap": true,
"strict": true, /* Enable all strict type-checking options. */
"noImplicitAny": false, /* Raise error on expressions and declarations with an implied 'any' type. */
"esModuleInterop": true /* Enables emit interoperability between CommonJS and ES Modules via creation of namespace objects for all imports. Implies 'allowSyntheticDefaultImports'. */
"esModuleInterop": true, /* Enables emit interoperability between CommonJS and ES Modules via creation of namespace objects for all imports. Implies 'allowSyntheticDefaultImports'. */
"resolveJsonModule": true, /* Allows importing modules with a '.json' extension, which is a common practice in node projects. */
},
"exclude": ["__tests__", "lib", "node_modules"]
}