mirror of https://github.com/actions/setup-dotnet.git
synced 2025-07-14 19:10:33 +07:00

Compare commits: add-publis... → remove-fal...
3 commits: 8bcd8d8b49, 0b32034241, aab9aab748
.github/workflows/check-dist.yml (vendored) — 2 changed lines

@@ -45,7 +45,7 @@ jobs:
         id: diff

       # If index.js was different than expected, upload the expected version as an artifact
-      - uses: actions/upload-artifact@v3
+      - uses: actions/upload-artifact@v4
         if: ${{ failure() && steps.diff.conclusion == 'failure' }}
         with:
           name: dist
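For readability, a minimal sketch of the resulting upload step, assembled from the hunk above (indentation and the artifact path are assumptions; they are not shown in the diff):

    # If index.js was different than expected, upload the expected version as an artifact
    - uses: actions/upload-artifact@v4
      if: ${{ failure() && steps.diff.conclusion == 'failure' }}
      with:
        name: dist
        path: dist/   # assumed; the hunk does not show the path value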
.github/workflows/test-dotnet.yml (vendored) — 2 changed lines

@@ -17,7 +17,7 @@ jobs:
     strategy:
       fail-fast: false
       matrix:
-        operating-system: [ubuntu-latest, windows-latest, macOS-latest]
+        operating-system: [ubuntu-22.04, windows-latest, macOS-latest]
         dotnet-version: ['2.1', '2.2', '3.0', '3.1', '5.0']
     steps:
       - name: Checkout
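Assembled from the hunk above, the resulting matrix block reads roughly as follows (indentation assumed):

    strategy:
      fail-fast: false
      matrix:
        operating-system: [ubuntu-22.04, windows-latest, macOS-latest]
        dotnet-version: ['2.1', '2.2', '3.0', '3.1', '5.0']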
.github/workflows/workflow.yml (vendored) — 24 changed lines

@@ -39,7 +39,7 @@ jobs:
     strategy:
       fail-fast: false
       matrix:
-        operating-system: [ubuntu-latest, windows-latest, macOS-latest]
+        operating-system: [ubuntu-22.04, windows-latest, macOS-latest]
     steps:
       - name: Checkout
         uses: actions/checkout@v3
@@ -62,7 +62,7 @@ jobs:
     strategy:
       fail-fast: false
       matrix:
-        operating-system: [ubuntu-latest, windows-latest, macOS-latest]
+        operating-system: [ubuntu-22.04, windows-latest, macos-13]
     steps:
       - name: Checkout
         uses: actions/checkout@v3
@@ -95,7 +95,7 @@ jobs:
     strategy:
       fail-fast: false
       matrix:
-        operating-system: [ubuntu-latest, windows-latest, macOS-latest]
+        operating-system: [ubuntu-22.04, windows-latest, macOS-latest]
     steps:
       - name: Checkout
         uses: actions/checkout@v3
@@ -120,7 +120,7 @@ jobs:
     strategy:
       fail-fast: false
       matrix:
-        operating-system: [ubuntu-latest, windows-latest, macOS-latest]
+        operating-system: [ubuntu-22.04, windows-latest, macOS-latest]
     steps:
       - name: Checkout
         uses: actions/checkout@v3
@@ -144,7 +144,7 @@ jobs:
     strategy:
       fail-fast: false
       matrix:
-        operating-system: [ubuntu-latest, windows-latest, macOS-latest]
+        operating-system: [ubuntu-22.04, windows-latest, macOS-latest]
     steps:
       - name: Checkout
         uses: actions/checkout@v3
@@ -168,7 +168,7 @@ jobs:
     strategy:
       fail-fast: false
       matrix:
-        operating-system: [ubuntu-latest, windows-latest, macOS-latest]
+        operating-system: [ubuntu-22.04, windows-latest, macOS-latest]
     steps:
       - name: Checkout
         uses: actions/checkout@v3
@@ -190,18 +190,20 @@ jobs:
         run: __tests__/verify-dotnet.ps1 2.2 3.1

   test-proxy:
-    runs-on: ubuntu-latest
+    runs-on: ubuntu-20.04
     container:
-      image: mcr.microsoft.com/dotnet/core/runtime-deps:3.0-bionic
+      image: ubuntu:20.04
       options: --dns 127.0.0.1
     services:
       squid-proxy:
-        image: datadog/squid:latest
+        image: ubuntu/squid:latest
         ports:
           - 3128:3128
     env:
       https_proxy: http://squid-proxy:3128
       http_proxy: http://squid-proxy:3128
+      DOTNET_SYSTEM_GLOBALIZATION_INVARIANT: true
+
     steps:
       - name: Checkout
         uses: actions/checkout@v3
@@ -210,7 +212,7 @@ jobs:
       - name: Install curl
         run: |
           apt update
-          apt -y install curl
+          apt -y install curl libssl1.1 libssl-dev
       - name: Setup dotnet 3.1.201
         uses: ./
         with:
@@ -222,7 +224,7 @@ jobs:
         run: __tests__/verify-dotnet.sh 3.1.201

   test-bypass-proxy:
-    runs-on: ubuntu-latest
+    runs-on: ubuntu-22.04
     env:
       https_proxy: http://no-such-proxy:3128
       no_proxy: github.com,dotnetcli.blob.core.windows.net,download.visualstudio.microsoft.com,api.nuget.org,dotnetcli.azureedge.net
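Putting the test-proxy hunks together, the updated job header would look roughly like this — a sketch assembled only from the changed and context lines above (indentation assumed):

    test-proxy:
      runs-on: ubuntu-20.04
      container:
        image: ubuntu:20.04
        options: --dns 127.0.0.1
      services:
        squid-proxy:
          image: ubuntu/squid:latest
          ports:
            - 3128:3128
      env:
        https_proxy: http://squid-proxy:3128
        http_proxy: http://squid-proxy:3128
        DOTNET_SYSTEM_GLOBALIZATION_INVARIANT: true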
__tests__/authutil.test.ts

@@ -1,340 +1,336 @@
 import io = require('@actions/io');
 import fs = require('fs');
 import path = require('path');

 const fakeSourcesDirForTesting = path.join(
   __dirname,
   'runner',
-  path.join(
-    Math.random()
-      .toString(36)
-      .substring(7)
-  ),
+  path.join(Math.random().toString(36).substring(7)),
   's'
 );

 const invalidNuGetConfig: string = `<?xml version="1.0" encoding="utf-8"?>`;

 const emptyNuGetConfig: string = `<?xml version="1.0" encoding="utf-8"?>
 <configuration>
 </configuration>`;

 const nugetorgNuGetConfig: string = `<?xml version="1.0" encoding="utf-8"?>
 <configuration>
   <packageSources>
     <add key="nuget.org" value="https://api.nuget.org/v3/index.json" protocolVersion="3" />
   </packageSources>
 </configuration>`;

 const gprnugetorgNuGetConfig: string = `<?xml version="1.0" encoding="utf-8"?>
 <configuration>
   <packageSources>
     <add key="GPR" value="https://nuget.pkg.github.com/OwnerName/index.json" protocolVersion="3" />
     <add key="nuget.org" value="https://api.nuget.org/v3/index.json" protocolVersion="3" />
   </packageSources>
 </configuration>`;

 const gprNuGetConfig: string = `<?xml version="1.0" encoding="utf-8"?>
 <configuration>
   <packageSources>
     <add key="GPR" value="https://nuget.pkg.github.com/OwnerName/index.json" protocolVersion="3" />
   </packageSources>
 </configuration>`;

 const twogprNuGetConfig: string = `<?xml version="1.0" encoding="utf-8"?>
 <configuration>
   <packageSources>
     <add key="GPR-GitHub" value="https://nuget.pkg.github.com/OwnerName/index.json" protocolVersion="3" />
     <add key="GPR-Actions" value="https://nuget.pkg.github.com/actions/index.json" protocolVersion="3" />
   </packageSources>
 </configuration>`;

 const spaceNuGetConfig: string = `<?xml version="1.0" encoding="utf-8"?>
 <configuration>
   <packageSources>
     <add key="GPR GitHub" value="https://nuget.pkg.github.com/OwnerName/index.json" protocolVersion="3" />
   </packageSources>
 </configuration>`;

 const azureartifactsNuGetConfig: string = `<?xml version="1.0" encoding="utf-8"?>
 <configuration>
   <packageSources>
     <add key="AzureArtifacts" value="https://pkgs.dev.azure.com/amullans/_packaging/GitHubBuilds/nuget/v3/index.json" protocolVersion="3" />
   </packageSources>
 </configuration>`;

 const azureartifactsnugetorgNuGetConfig: string = `<?xml version="1.0" encoding="utf-8"?>
 <configuration>
   <packageSources>
     <add key="AzureArtifacts" value="https://pkgs.dev.azure.com/amullans/_packaging/GitHubBuilds/nuget/v3/index.json" protocolVersion="3" />
     <add key="nuget.org" value="https://api.nuget.org/v3/index.json" protocolVersion="3" />
   </packageSources>
 </configuration>`;

 // We want a NuGet.config one level above the sources directory, so it doesn't trample a user's NuGet.config but is still picked up by NuGet/dotnet.
 const nugetConfigFile = path.join(fakeSourcesDirForTesting, '../nuget.config');

 process.env['GITHUB_REPOSITORY'] = 'OwnerName/repo';
 import * as auth from '../src/authutil';

 describe('authutil tests', () => {
   beforeEach(async () => {
     await io.rmRF(fakeSourcesDirForTesting);
     await io.mkdirP(fakeSourcesDirForTesting);
   }, 30000);

   afterAll(async () => {
     await io.rmRF(fakeSourcesDirForTesting);
   }, 30000);

   beforeEach(() => {
     if (fs.existsSync(nugetConfigFile)) {
       fs.unlinkSync(nugetConfigFile);
     }
     process.env['INPUT_OWNER'] = '';
     process.env['NUGET_AUTH_TOKEN'] = '';
   });

   it('No existing config, sets up a full NuGet.config with URL and user/PAT for GPR', async () => {
     process.env['NUGET_AUTH_TOKEN'] = 'TEST_FAKE_AUTH_TOKEN';
     await auth.configAuthentication(
       'https://nuget.pkg.github.com/OwnerName/index.json',
       '',
       fakeSourcesDirForTesting
     );
     expect(fs.existsSync(nugetConfigFile)).toBe(true);
     expect(
       fs.readFileSync(nugetConfigFile, {encoding: 'utf8'})
     ).toMatchSnapshot();
   });

   it('No existing config, auth token environment variable not provided, throws', async () => {
     let thrown = false;
     try {
       await auth.configAuthentication(
         'https://nuget.pkg.github.com/OwnerName/index.json',
         '',
         fakeSourcesDirForTesting
       );
     } catch {
       thrown = true;
     }
     expect(thrown).toBe(true);
   });

   it('No existing config, sets up a full NuGet.config with URL and other owner/PAT for GPR', async () => {
     process.env['NUGET_AUTH_TOKEN'] = 'TEST_FAKE_AUTH_TOKEN';
     process.env['INPUT_OWNER'] = 'otherorg';
     await auth.configAuthentication(
       'https://nuget.pkg.github.com/otherorg/index.json',
       '',
       fakeSourcesDirForTesting
     );
     expect(fs.existsSync(nugetConfigFile)).toBe(true);
     expect(
       fs.readFileSync(nugetConfigFile, {encoding: 'utf8'})
     ).toMatchSnapshot();
   });

   it('Existing config (invalid), tries to parse an invalid NuGet.config and throws', async () => {
     process.env['NUGET_AUTH_TOKEN'] = 'TEST_FAKE_AUTH_TOKEN';
     const inputNuGetConfigPath: string = path.join(
       fakeSourcesDirForTesting,
       'nuget.config'
     );
     fs.writeFileSync(inputNuGetConfigPath, invalidNuGetConfig);
     let thrown = false;
     try {
       await auth.configAuthentication(
         'https://nuget.pkg.github.com/OwnerName/index.json',
         '',
         fakeSourcesDirForTesting
       );
     } catch {
       thrown = true;
     }
     expect(thrown).toBe(true);
   });

   it('Existing config w/ no sources, sets up a full NuGet.config with URL and user/PAT for GPR', async () => {
     process.env['NUGET_AUTH_TOKEN'] = 'TEST_FAKE_AUTH_TOKEN';
     const inputNuGetConfigPath: string = path.join(
       fakeSourcesDirForTesting,
       'nuget.config'
     );
     fs.writeFileSync(inputNuGetConfigPath, emptyNuGetConfig);
     await auth.configAuthentication(
       'https://nuget.pkg.github.com/OwnerName/index.json',
       '',
       fakeSourcesDirForTesting
     );
     expect(fs.existsSync(nugetConfigFile)).toBe(true);
     expect(
       fs.readFileSync(nugetConfigFile, {encoding: 'utf8'})
     ).toMatchSnapshot();
   });

   it('Existing config w/ no GPR sources, sets up a full NuGet.config with URL and user/PAT for GPR', async () => {
     process.env['NUGET_AUTH_TOKEN'] = 'TEST_FAKE_AUTH_TOKEN';
     const inputNuGetConfigPath: string = path.join(
       fakeSourcesDirForTesting,
       'nuget.config'
     );
     fs.writeFileSync(inputNuGetConfigPath, nugetorgNuGetConfig);
     await auth.configAuthentication(
       'https://nuget.pkg.github.com/OwnerName/index.json',
       '',
       fakeSourcesDirForTesting
     );
     expect(fs.existsSync(nugetConfigFile)).toBe(true);
     expect(
       fs.readFileSync(nugetConfigFile, {encoding: 'utf8'})
     ).toMatchSnapshot();
   });

   it('Existing config w/ only GPR source, sets up a partial NuGet.config user/PAT for GPR', async () => {
     process.env['NUGET_AUTH_TOKEN'] = 'TEST_FAKE_AUTH_TOKEN';
     const inputNuGetConfigPath: string = path.join(
       fakeSourcesDirForTesting,
       'nuget.config'
     );
     fs.writeFileSync(inputNuGetConfigPath, gprNuGetConfig);
     await auth.configAuthentication(
       'https://nuget.pkg.github.com/OwnerName/index.json',
       '',
       fakeSourcesDirForTesting
     );
     expect(fs.existsSync(nugetConfigFile)).toBe(true);
     expect(
       fs.readFileSync(nugetConfigFile, {encoding: 'utf8'})
     ).toMatchSnapshot();
   });

   it('Existing config w/ GPR source and NuGet.org, sets up a partial NuGet.config user/PAT for GPR', async () => {
     process.env['NUGET_AUTH_TOKEN'] = 'TEST_FAKE_AUTH_TOKEN';
     const inputNuGetConfigPath: string = path.join(
       fakeSourcesDirForTesting,
       'nuget.config'
     );
     fs.writeFileSync(inputNuGetConfigPath, gprnugetorgNuGetConfig);
     await auth.configAuthentication(
       'https://nuget.pkg.github.com/OwnerName/index.json',
       '',
       fakeSourcesDirForTesting
     );
     expect(fs.existsSync(nugetConfigFile)).toBe(true);
     expect(
       fs.readFileSync(nugetConfigFile, {encoding: 'utf8'})
     ).toMatchSnapshot();
   });

   it('Existing config w/ two GPR sources, sets up a partial NuGet.config user/PAT for GPR', async () => {
     process.env['NUGET_AUTH_TOKEN'] = 'TEST_FAKE_AUTH_TOKEN';
     const inputNuGetConfigPath: string = path.join(
       fakeSourcesDirForTesting,
       'nuget.config'
     );
     fs.writeFileSync(inputNuGetConfigPath, twogprNuGetConfig);
     await auth.configAuthentication(
       'https://nuget.pkg.github.com',
       '',
       fakeSourcesDirForTesting
     );
     expect(fs.existsSync(nugetConfigFile)).toBe(true);
     expect(
       fs.readFileSync(nugetConfigFile, {encoding: 'utf8'})
     ).toMatchSnapshot();
   });

   it('Existing config w/ spaces in key, throws for now', async () => {
     process.env['NUGET_AUTH_TOKEN'] = 'TEST_FAKE_AUTH_TOKEN';
     const inputNuGetConfigPath: string = path.join(
       fakeSourcesDirForTesting,
       'nuget.config'
     );
     fs.writeFileSync(inputNuGetConfigPath, spaceNuGetConfig);
     let thrown = false;
     try {
       await auth.configAuthentication(
         'https://nuget.pkg.github.com/OwnerName/index.json',
         '',
         fakeSourcesDirForTesting
       );
     } catch {
       thrown = true;
     }
     expect(thrown).toBe(true);
   });

   it('Existing config not in repo root, sets up a partial NuGet.config user/PAT for GPR', async () => {
     process.env['NUGET_AUTH_TOKEN'] = 'TEST_FAKE_AUTH_TOKEN';
     const inputNuGetConfigDirectory: string = path.join(
       fakeSourcesDirForTesting,
       'subfolder'
     );
     const inputNuGetConfigPath: string = path.join(
       inputNuGetConfigDirectory,
       'nuget.config'
     );
     fs.mkdirSync(inputNuGetConfigDirectory, {recursive: true});
     fs.writeFileSync(inputNuGetConfigPath, gprNuGetConfig);
     await auth.configAuthentication(
       'https://nuget.pkg.github.com/OwnerName/index.json',
       'subfolder/nuget.config',
       fakeSourcesDirForTesting
     );
     expect(fs.existsSync(nugetConfigFile)).toBe(true);
     expect(
       fs.readFileSync(nugetConfigFile, {encoding: 'utf8'})
     ).toMatchSnapshot();
   });

   it('Existing config w/ only Azure Artifacts source, sets up a partial NuGet.config user/PAT for GPR', async () => {
     process.env['NUGET_AUTH_TOKEN'] = 'TEST_FAKE_AUTH_TOKEN';
     const inputNuGetConfigPath: string = path.join(
       fakeSourcesDirForTesting,
       'nuget.config'
     );
     fs.writeFileSync(inputNuGetConfigPath, azureartifactsNuGetConfig);
     await auth.configAuthentication(
       'https://pkgs.dev.azure.com/amullans/_packaging/GitHubBuilds/nuget/v3/index.json',
       '',
       fakeSourcesDirForTesting
     );
     expect(fs.existsSync(nugetConfigFile)).toBe(true);
     expect(
       fs.readFileSync(nugetConfigFile, {encoding: 'utf8'})
     ).toMatchSnapshot();
   });

   it('Existing config w/ Azure Artifacts source and NuGet.org, sets up a partial NuGet.config user/PAT for GPR', async () => {
     process.env['NUGET_AUTH_TOKEN'] = 'TEST_FAKE_AUTH_TOKEN';
     const inputNuGetConfigPath: string = path.join(
       fakeSourcesDirForTesting,
       'nuget.config'
     );
     fs.writeFileSync(inputNuGetConfigPath, azureartifactsnugetorgNuGetConfig);
     await auth.configAuthentication(
       'https://pkgs.dev.azure.com/amullans/_packaging/GitHubBuilds/nuget/v3/index.json',
       '',
       fakeSourcesDirForTesting
     );
     expect(fs.existsSync(nugetConfigFile)).toBe(true);
     expect(
       fs.readFileSync(nugetConfigFile, {encoding: 'utf8'})
     ).toMatchSnapshot();
   });

   it('No existing config, sets up a full NuGet.config with URL and token for other source', async () => {
     process.env['NUGET_AUTH_TOKEN'] = 'TEST_FAKE_AUTH_TOKEN';
     await auth.configAuthentication(
       'https://pkgs.dev.azure.com/amullans/_packaging/GitHubBuilds/nuget/v3/index.json',
       '',
       fakeSourcesDirForTesting
     );
     expect(fs.existsSync(nugetConfigFile)).toBe(true);
     expect(
       fs.readFileSync(nugetConfigFile, {encoding: 'utf8'})
     ).toMatchSnapshot();
   });
 });
dist/index.js (vendored) — 2604 changed lines. File diff suppressed because it is too large.

externals/install-dotnet.ps1 (vendored) — 3113 changed lines. File diff suppressed because it is too large.
externals/install-dotnet.sh (vendored) — 315 changed lines
@@ -298,11 +298,20 @@ get_machine_architecture() {
     if command -v uname > /dev/null; then
         CPUName=$(uname -m)
         case $CPUName in
+        armv1*|armv2*|armv3*|armv4*|armv5*|armv6*)
+            echo "armv6-or-below"
+            return 0
+            ;;
         armv*l)
             echo "arm"
             return 0
             ;;
         aarch64|arm64)
+            if [ "$(getconf LONG_BIT)" -lt 64 ]; then
+                # This is 32-bit OS running on 64-bit CPU (for example Raspberry Pi OS)
+                echo "arm"
+                return 0
+            fi
             echo "arm64"
             return 0
             ;;
@@ -310,6 +319,22 @@ get_machine_architecture() {
             echo "s390x"
             return 0
             ;;
+        ppc64le)
+            echo "ppc64le"
+            return 0
+            ;;
+        loongarch64)
+            echo "loongarch64"
+            return 0
+            ;;
+        riscv64)
+            echo "riscv64"
+            return 0
+            ;;
+        powerpc|ppc)
+            echo "ppc"
+            return 0
+            ;;
         esac
     fi
 
@@ -326,7 +351,13 @@ get_normalized_architecture_from_architecture() {
     local architecture="$(to_lowercase "$1")"
 
     if [[ $architecture == \<auto\> ]]; then
-        echo "$(get_machine_architecture)"
+        machine_architecture="$(get_machine_architecture)"
+        if [[ "$machine_architecture" == "armv6-or-below" ]]; then
+            say_err "Architecture \`$machine_architecture\` not supported. If you think this is a bug, report it at https://github.com/dotnet/install-scripts/issues"
+            return 1
+        fi
+
+        echo $machine_architecture
         return 0
     fi
 
@@ -347,6 +378,14 @@ get_normalized_architecture_from_architecture() {
             echo "s390x"
             return 0
             ;;
+        ppc64le)
+            echo "ppc64le"
+            return 0
+            ;;
+        loongarch64)
+            echo "loongarch64"
+            return 0
+            ;;
     esac
 
     say_err "Architecture \`$architecture\` not supported. If you think this is a bug, report it at https://github.com/dotnet/install-scripts/issues"
@@ -384,11 +423,17 @@ get_normalized_architecture_for_specific_sdk_version() {
 # args:
 # version or channel - $1
 is_arm64_supported() {
-    #any channel or version that starts with the specified versions
-    case "$1" in
-        ( "1"* | "2"* | "3"* | "4"* | "5"*)
-            echo false
-            return 0
+    # Extract the major version by splitting on the dot
+    major_version="${1%%.*}"
+
+    # Check if the major version is a valid number and less than 6
+    case "$major_version" in
+        [0-9]*)
+            if [ "$major_version" -lt 6 ]; then
+                echo false
+                return 0
+            fi
+            ;;
     esac
 
     echo true
@@ -407,8 +452,13 @@ get_normalized_os() {
             echo "$osname"
             return 0
             ;;
+        macos)
+            osname='osx'
+            echo "$osname"
+            return 0
+            ;;
         *)
-            say_err "'$user_defined_os' is not a supported value for --os option, supported values are: osx, linux, linux-musl, freebsd, rhel.6. If you think this is a bug, report it at https://github.com/dotnet/install-scripts/issues."
+            say_err "'$user_defined_os' is not a supported value for --os option, supported values are: osx, macos, linux, linux-musl, freebsd, rhel.6. If you think this is a bug, report it at https://github.com/dotnet/install-scripts/issues."
             return 1
             ;;
     esac
@@ -451,6 +501,10 @@ get_normalized_channel() {
 
     local channel="$(to_lowercase "$1")"
 
+    if [[ $channel == current ]]; then
+        say_warning 'Value "Current" is deprecated for -Channel option. Use "STS" instead.'
+    fi
+
     if [[ $channel == release/* ]]; then
         say_warning 'Using branch name with -Channel option is no longer supported with newer releases. Use -Quality option with a channel in X.Y format instead.';
     fi
@@ -461,6 +515,14 @@ get_normalized_channel() {
             echo "LTS"
             return 0
             ;;
+        sts)
+            echo "STS"
+            return 0
+            ;;
+        current)
+            echo "STS"
+            return 0
+            ;;
         *)
             echo "$channel"
             return 0
@@ -526,6 +588,40 @@ is_dotnet_package_installed() {
     fi
 }
 
+# args:
+# downloaded file - $1
+# remote_file_size - $2
+validate_remote_local_file_sizes()
+{
+    eval $invocation
+
+    local downloaded_file="$1"
+    local remote_file_size="$2"
+    local file_size=''
+
+    if [[ "$OSTYPE" == "linux-gnu"* ]]; then
+        file_size="$(stat -c '%s' "$downloaded_file")"
+    elif [[ "$OSTYPE" == "darwin"* ]]; then
+        # hardcode in order to avoid conflicts with GNU stat
+        file_size="$(/usr/bin/stat -f '%z' "$downloaded_file")"
+    fi
+
+    if [ -n "$file_size" ]; then
+        say "Downloaded file size is $file_size bytes."
+
+        if [ -n "$remote_file_size" ] && [ -n "$file_size" ]; then
+            if [ "$remote_file_size" -ne "$file_size" ]; then
+                say "The remote and local file sizes are not equal. The remote file size is $remote_file_size bytes and the local size is $file_size bytes. The local package may be corrupted."
+            else
+                say "The remote and local file sizes are equal."
+            fi
+        fi
+
+    else
+        say "Either downloaded or local package size can not be measured. One of them may be corrupted."
+    fi
+}
+
 # args:
 # azure_feed - $1
 # channel - $2
@@ -860,6 +956,37 @@ get_absolute_path() {
     return 0
 }
 
+# args:
+# override - $1 (boolean, true or false)
+get_cp_options() {
+    eval $invocation
+
+    local override="$1"
+    local override_switch=""
+
+    if [ "$override" = false ]; then
+        override_switch="-n"
+
+        # create temporary files to check if 'cp -u' is supported
+        tmp_dir="$(mktemp -d)"
+        tmp_file="$tmp_dir/testfile"
+        tmp_file2="$tmp_dir/testfile2"
+
+        touch "$tmp_file"
+
+        # use -u instead of -n if it's available
+        if cp -u "$tmp_file" "$tmp_file2" 2>/dev/null; then
+            override_switch="-u"
+        fi
+
+        # clean up
+        rm -f "$tmp_file" "$tmp_file2"
+        rm -rf "$tmp_dir"
+    fi
+
+    echo "$override_switch"
+}
+
 # args:
 # input_files - stdin
 # root_path - $1
@@ -871,15 +998,7 @@ copy_files_or_dirs_from_list() {
     local root_path="$(remove_trailing_slash "$1")"
     local out_path="$(remove_trailing_slash "$2")"
     local override="$3"
-    local osname="$(get_current_os_name)"
-    local override_switch=$(
-        if [ "$override" = false ]; then
-            if [ "$osname" = "linux-musl" ]; then
-                printf -- "-u";
-            else
-                printf -- "-n";
-            fi
-        fi)
+    local override_switch="$(get_cp_options "$override")"
 
     cat | uniq | while read -r file_path; do
         local path="$(remove_beginning_slash "${file_path#$root_path}")"
@@ -894,14 +1013,39 @@ copy_files_or_dirs_from_list() {
     done
 }
 
+# args:
+# zip_uri - $1
+get_remote_file_size() {
+    local zip_uri="$1"
+
+    if machine_has "curl"; then
+        file_size=$(curl -sI "$zip_uri" | grep -i content-length | awk '{ num = $2 + 0; print num }')
+    elif machine_has "wget"; then
+        file_size=$(wget --spider --server-response -O /dev/null "$zip_uri" 2>&1 | grep -i 'Content-Length:' | awk '{ num = $2 + 0; print num }')
+    else
+        say "Neither curl nor wget is available on this system."
+        return
+    fi
+
+    if [ -n "$file_size" ]; then
+        say "Remote file $zip_uri size is $file_size bytes."
+        echo "$file_size"
+    else
+        say_verbose "Content-Length header was not extracted for $zip_uri."
+        echo ""
+    fi
+}
+
 # args:
 # zip_path - $1
 # out_path - $2
+# remote_file_size - $3
 extract_dotnet_package() {
     eval $invocation
 
     local zip_path="$1"
     local out_path="$2"
+    local remote_file_size="$3"
 
     local temp_out_path="$(mktemp -d "$temporary_file_template")"
 
@@ -911,9 +1055,13 @@ extract_dotnet_package() {
     local folders_with_version_regex='^.*/[0-9]+\.[0-9]+[^/]+/'
     find "$temp_out_path" -type f | grep -Eo "$folders_with_version_regex" | sort | copy_files_or_dirs_from_list "$temp_out_path" "$out_path" false
     find "$temp_out_path" -type f | grep -Ev "$folders_with_version_regex" | copy_files_or_dirs_from_list "$temp_out_path" "$out_path" "$override_non_versioned_files"
 
+    validate_remote_local_file_sizes "$zip_path" "$remote_file_size"
+
     rm -rf "$temp_out_path"
-    rm -f "$zip_path" && say_verbose "Temporary zip file $zip_path was removed"
+    if [ -z ${keep_zip+x} ]; then
+        rm -f "$zip_path" && say_verbose "Temporary archive file $zip_path was removed"
+    fi
 
     if [ "$failed" = true ]; then
         say_err "Extraction failed"
@@ -1124,13 +1272,69 @@ downloadwget() {
     return 0
 }
 
+extract_stem() {
+    local url="$1"
+    # extract the protocol
+    proto="$(echo $1 | grep :// | sed -e's,^\(.*://\).*,\1,g')"
+    # remove the protocol
+    url="${1/$proto/}"
+    # extract the path (if any) - since we know all of our feeds have a first path segment, we can skip the first one. otherwise we'd use -f2- to get the full path
+    full_path="$(echo $url | grep / | cut -d/ -f2-)"
+    path="$(echo $full_path | cut -d/ -f2-)"
+    echo $path
+}
+
+check_url_exists() {
+    eval $invocation
+    local url="$1"
+
+    local code=""
+    if machine_has "curl"
+    then
+        code=$(curl --head -o /dev/null -w "%{http_code}" -s --fail "$url");
+    elif machine_has "wget"
+    then
+        # get the http response, grab the status code
+        server_response=$(wget -qO- --method=HEAD --server-response "$url" 2>&1)
+        code=$(echo "$server_response" | grep "HTTP/" | awk '{print $2}')
+    fi
+    if [ $code = "200" ]; then
+        return 0
+    else
+        return 1
+    fi
+}
+
+sanitize_redirect_url() {
+    eval $invocation
+
+    local url_stem
+    url_stem=$(extract_stem "$1")
+    say_verbose "Checking configured feeds for the asset at ${yellow:-}$url_stem${normal:-}"
+
+    for feed in "${feeds[@]}"
+    do
+        local trial_url="$feed/$url_stem"
+        say_verbose "Checking ${yellow:-}$trial_url${normal:-}"
+        if check_url_exists "$trial_url"; then
+            say_verbose "Found a match at ${yellow:-}$trial_url${normal:-}"
+            echo "$trial_url"
+            return 0
+        else
+            say_verbose "No match at ${yellow:-}$trial_url${normal:-}"
+        fi
+    done
+    return 1
+}
+
 get_download_link_from_aka_ms() {
     eval $invocation
 
-    #quality is not supported for LTS or current channel
-    if [[ ! -z "$normalized_quality" && ("$normalized_channel" == "LTS" || "$normalized_channel" == "current") ]]; then
+    #quality is not supported for LTS or STS channel
+    #STS maps to current
+    if [[ ! -z "$normalized_quality" && ("$normalized_channel" == "LTS" || "$normalized_channel" == "STS") ]]; then
         normalized_quality=""
-        say_warning "Specifying quality for current or LTS channel is not supported, the quality will be ignored."
+        say_warning "Specifying quality for STS or LTS channel is not supported, the quality will be ignored."
     fi
 
     say_verbose "Retrieving primary payload URL from aka.ms for channel: '$normalized_channel', quality: '$normalized_quality', product: '$normalized_product', os: '$normalized_os', architecture: '$normalized_architecture'."
@@ -1159,6 +1363,12 @@ get_download_link_from_aka_ms() {
         http_codes=$( echo "$response" | awk '$1 ~ /^HTTP/ {print $2}' )
         # They all need to be 301, otherwise some links are broken (except for the last, which is not a redirect but 200 or 404).
         broken_redirects=$( echo "$http_codes" | sed '$d' | grep -v '301' )
+        # The response may end without final code 2xx/4xx/5xx somehow, e.g. network restrictions on www.bing.com causes redirecting to bing.com fails with connection refused.
+        # In this case it should not exclude the last.
+        last_http_code=$( echo "$http_codes" | tail -n 1 )
+        if ! [[ $last_http_code =~ ^(2|4|5)[0-9][0-9]$ ]]; then
+            broken_redirects=$( echo "$http_codes" | grep -v '301' )
+        fi
 
         # All HTTP codes are 301 (Moved Permanently), the redirect link exists.
         if [[ -z "$broken_redirects" ]]; then
@@ -1169,6 +1379,11 @@ get_download_link_from_aka_ms() {
             return 1
         fi
 
+        sanitized_redirect_url=$(sanitize_redirect_url "$aka_ms_download_link")
+        if [[ -n "$sanitized_redirect_url" ]]; then
+            aka_ms_download_link="$sanitized_redirect_url"
+        fi
+
         say_verbose "The redirect location retrieved: '$aka_ms_download_link'."
         return 0
     else
@@ -1180,7 +1395,9 @@ get_download_link_from_aka_ms() {
 get_feeds_to_use()
 {
     feeds=(
+    "https://builds.dotnet.microsoft.com/dotnet"
     "https://dotnetcli.azureedge.net/dotnet"
+    "https://ci.dot.net/public"
     "https://dotnetbuilds.azureedge.net/public"
     )
 
@@ -1239,7 +1456,7 @@ generate_akams_links() {
 
     normalized_version="$(to_lowercase "$version")"
     if [[ "$normalized_version" != "latest" ]] && [ -n "$normalized_quality" ]; then
-        say_err "Quality and Version options are not allowed to be specified simultaneously. See https://docs.microsoft.com/en-us/dotnet/core/tools/dotnet-install-script#options for details."
+        say_err "Quality and Version options are not allowed to be specified simultaneously. See https://learn.microsoft.com/dotnet/core/tools/dotnet-install-script#options for details."
         return 1
     fi
 
@@ -1406,10 +1623,11 @@ install_dotnet() {
     eval $invocation
     local download_failed=false
     local download_completed=false
+    local remote_file_size=0
 
     mkdir -p "$install_root"
-    zip_path="$(mktemp "$temporary_file_template")"
-    say_verbose "Zip path: $zip_path"
+    zip_path="${zip_path:-$(mktemp "$temporary_file_template")}"
+    say_verbose "Archive path: $zip_path"
 
     for link_index in "${!download_links[@]}"
     do
@@ -1433,7 +1651,7 @@ install_dotnet() {
                 say "Failed to download $link_type link '$download_link': $download_error_msg"
                 ;;
             esac
-            rm -f "$zip_path" 2>&1 && say_verbose "Temporary zip file $zip_path was removed"
+            rm -f "$zip_path" 2>&1 && say_verbose "Temporary archive file $zip_path was removed"
         else
             download_completed=true
             break
@@ -1446,8 +1664,10 @@ install_dotnet() {
         return 1
     fi
 
-    say "Extracting zip from $download_link"
-    extract_dotnet_package "$zip_path" "$install_root" || return 1
+    remote_file_size="$(get_remote_file_size "$download_link")"
+
+    say "Extracting archive from $download_link"
+    extract_dotnet_package "$zip_path" "$install_root" "$remote_file_size" || return 1
 
     # Check if the SDK version is installed; if not, fail the installation.
     # if the version contains "RTM" or "servicing"; check if a 'release-type' SDK version is installed.
@@ -1597,25 +1817,42 @@ do
             override_non_versioned_files=false
             non_dynamic_parameters+=" $name"
             ;;
+        --keep-zip|-[Kk]eep[Zz]ip)
+            keep_zip=true
+            non_dynamic_parameters+=" $name"
+            ;;
+        --zip-path|-[Zz]ip[Pp]ath)
+            shift
+            zip_path="$1"
+            ;;
         -?|--?|-h|--help|-[Hh]elp)
-            script_name="$(basename "$0")"
+            script_name="dotnet-install.sh"
             echo ".NET Tools Installer"
-            echo "Usage: $script_name [-c|--channel <CHANNEL>] [-v|--version <VERSION>] [-p|--prefix <DESTINATION>]"
+            echo "Usage:"
+            echo "  # Install a .NET SDK of a given Quality from a given Channel"
+            echo "  $script_name [-c|--channel <CHANNEL>] [-q|--quality <QUALITY>]"
+            echo "  # Install a .NET SDK of a specific public version"
+            echo "  $script_name [-v|--version <VERSION>]"
             echo "  $script_name -h|-?|--help"
             echo ""
             echo "$script_name is a simple command line interface for obtaining dotnet cli."
+            echo "  Note that the intended use of this script is for Continuous Integration (CI) scenarios, where:"
+            echo "  - The SDK needs to be installed without user interaction and without admin rights."
+            echo "  - The SDK installation doesn't need to persist across multiple CI runs."
+            echo "  To set up a development environment or to run apps, use installers rather than this script. Visit https://dotnet.microsoft.com/download to get the installer."
             echo ""
             echo "Options:"
             echo "  -c,--channel <CHANNEL>         Download from the channel specified, Defaults to \`$channel\`."
            echo "      -Channel"
             echo "          Possible values:"
-            echo "          - Current - most current release"
-            echo "          - LTS - most current supported release"
+            echo "          - STS - the most recent Standard Term Support release"
+            echo "          - LTS - the most recent Long Term Support release"
             echo "          - 2-part version in a format A.B - represents a specific release"
             echo "              examples: 2.0; 1.0"
             echo "          - 3-part version in a format A.B.Cxx - represents a specific SDK release"
             echo "              examples: 5.0.1xx, 5.0.2xx."
             echo "              Supported since 5.0 release"
+            echo "          Warning: Value 'Current' is deprecated for the Channel parameter. Use 'STS' instead."
             echo "          Note: The version parameter overrides the channel parameter when any version other than 'latest' is used."
             echo "  -v,--version <VERSION>         Use specific VERSION, Defaults to \`$version\`."
             echo "      -Version"
@@ -1626,7 +1863,7 @@ do
             echo "  -q,--quality <quality>         Download the latest build of specified quality in the channel."
             echo "      -Quality"
             echo "          The possible values are: daily, signed, validated, preview, GA."
-            echo "          Works only in combination with channel. Not applicable for current and LTS channels and will be ignored if those channels are used."
+            echo "          Works only in combination with channel. Not applicable for STS and LTS channels and will be ignored if those channels are used."
             echo "          For SDK use channel in A.B.Cxx format. Using quality for SDK together with channel in A.B format is not supported."
             echo "          Supported since 5.0 release."
             echo "          Note: The version parameter overrides the channel parameter when any version other than 'latest' is used, and therefore overrides the quality."
@@ -1637,7 +1874,7 @@ do
             echo "      -InstallDir"
             echo "  --architecture <ARCHITECTURE>  Architecture of dotnet binaries to be installed, Defaults to \`$architecture\`."
             echo "      --arch,-Architecture,-Arch"
-            echo "          Possible values: x64, arm, arm64 and s390x"
+            echo "          Possible values: x64, arm, arm64, s390x, ppc64le and loongarch64"
             echo "  --os <system>                  Specifies operating system to be used when selecting the installer."
             echo "          Overrides the OS determination approach used by the script. Supported values: osx, linux, linux-musl, freebsd, rhel.6."
             echo "          In case any other value is provided, the platform will be determined by the script based on machine configuration."
@ -1662,6 +1899,8 @@ do
|
|||||||
echo " --no-cdn,-NoCdn Disable downloading from the Azure CDN, and use the uncached feed directly."
|
echo " --no-cdn,-NoCdn Disable downloading from the Azure CDN, and use the uncached feed directly."
|
||||||
echo " --jsonfile <JSONFILE> Determines the SDK version from a user specified global.json file."
|
echo " --jsonfile <JSONFILE> Determines the SDK version from a user specified global.json file."
|
||||||
echo " Note: global.json must have a value for 'SDK:Version'"
|
echo " Note: global.json must have a value for 'SDK:Version'"
|
||||||
|
echo " --keep-zip,-KeepZip If set, downloaded file is kept."
|
||||||
|
echo " --zip-path, -ZipPath If set, downloaded file is stored at the specified path."
|
||||||
echo " -?,--?,-h,--help,-Help Shows this help message"
|
echo " -?,--?,-h,--help,-Help Shows this help message"
|
||||||
echo ""
|
echo ""
|
||||||
echo "Install Location:"
|
echo "Install Location:"
|
||||||
@ -1680,10 +1919,10 @@ do
|
|||||||
shift
|
shift
|
||||||
done
|
done
|
||||||
|
|
||||||
say "Note that the intended use of this script is for Continuous Integration (CI) scenarios, where:"
|
say_verbose "Note that the intended use of this script is for Continuous Integration (CI) scenarios, where:"
|
||||||
say "- The SDK needs to be installed without user interaction and without admin rights."
|
say_verbose "- The SDK needs to be installed without user interaction and without admin rights."
|
||||||
say "- The SDK installation doesn't need to persist across multiple CI runs."
|
say_verbose "- The SDK installation doesn't need to persist across multiple CI runs."
|
||||||
say "To set up a development environment or to run apps, use installers rather than this script. Visit https://dotnet.microsoft.com/download to get the installer.\n"
|
say_verbose "To set up a development environment or to run apps, use installers rather than this script. Visit https://dotnet.microsoft.com/download to get the installer.\n"
|
||||||
|
|
||||||
if [ "$internal" = true ] && [ -z "$(echo $feed_credential)" ]; then
|
if [ "$internal" = true ] && [ -z "$(echo $feed_credential)" ]; then
|
||||||
message="Provide credentials via --feed-credential parameter."
|
message="Provide credentials via --feed-credential parameter."
|
||||||
@ -1716,5 +1955,5 @@ else
|
|||||||
fi
|
fi
|
||||||
|
|
||||||
say "Note that the script does not resolve dependencies during installation."
|
say "Note that the script does not resolve dependencies during installation."
|
||||||
say "To check the list of dependencies, go to https://docs.microsoft.com/dotnet/core/install, select your operating system and check the \"Dependencies\" section."
|
say "To check the list of dependencies, go to https://learn.microsoft.com/dotnet/core/install, select your operating system and check the \"Dependencies\" section."
|
||||||
say "Installation finished successfully."
|
say "Installation finished successfully."
|
||||||
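The options above are the install script's documented CLI surface. As a rough, hypothetical sketch (not code from this compare), this is how the channel and quality options could be passed to the script from the action's TypeScript side via @actions/exec, in the same style installer.ts below uses for --version; the installFromChannel name and the chosen flag combination are illustrative assumptions.

// Hypothetical sketch only: drive externals/install-dotnet.sh with the
// --channel/--quality options documented in the help text above.
import * as exec from '@actions/exec';
import * as path from 'path';

async function installFromChannel(channel: string, quality?: string): Promise<number> {
  const scriptPath = path.join(__dirname, '..', 'externals', 'install-dotnet.sh');
  const args = ['--channel', channel];
  if (quality) {
    // Per the help text, quality only applies to A.B.Cxx channels and is
    // ignored for the STS/LTS channels.
    args.push('--quality', quality);
  }
  // exec.exec resolves with the script's exit code.
  return exec.exec(`"${scriptPath}"`, args);
}

For example, installFromChannel('5.0.2xx', 'preview') would map to ./install-dotnet.sh --channel 5.0.2xx --quality preview (values here are illustrative).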
310
src/authutil.ts
@ -1,155 +1,155 @@
import * as fs from 'fs';
import * as path from 'path';
import * as core from '@actions/core';
import * as github from '@actions/github';
import * as xmlbuilder from 'xmlbuilder';
import * as xmlParser from 'fast-xml-parser';
import {ProcessEnvOptions} from 'child_process';

export function configAuthentication(
  feedUrl: string,
  existingFileLocation: string = '',
  processRoot: string = process.cwd()
) {
  const existingNuGetConfig: string = path.resolve(
    processRoot,
    existingFileLocation === ''
      ? getExistingNugetConfig(processRoot)
      : existingFileLocation
  );

  const tempNuGetConfig: string = path.resolve(
    processRoot,
    '../',
    'nuget.config'
  );

  writeFeedToFile(feedUrl, existingNuGetConfig, tempNuGetConfig);
}

function isValidKey(key: string): boolean {
  return /^[\w\-\.]+$/i.test(key);
}

function getExistingNugetConfig(processRoot: string) {
  const defaultConfigName = 'nuget.config';
  const configFileNames = fs
    .readdirSync(processRoot)
    .filter(filename => filename.toLowerCase() === defaultConfigName);
  if (configFileNames.length) {
    return configFileNames[0];
  }
  return defaultConfigName;
}

function writeFeedToFile(
  feedUrl: string,
  existingFileLocation: string,
  tempFileLocation: string
) {
  console.log(
    `dotnet-auth: Finding any source references in ${existingFileLocation}, writing a new temporary configuration file with credentials to ${tempFileLocation}`
  );
  let xml: xmlbuilder.XMLElement;
  let sourceKeys: string[] = [];
  let owner: string = core.getInput('owner');
  let sourceUrl: string = feedUrl;
  if (!owner) {
    owner = github.context.repo.owner;
  }

  if (!process.env.NUGET_AUTH_TOKEN || process.env.NUGET_AUTH_TOKEN == '') {
    throw new Error(
      'The NUGET_AUTH_TOKEN environment variable was not provided. In this step, add the following: \r\nenv:\r\n NUGET_AUTH_TOKEN: ${{secrets.GITHUB_TOKEN}}'
    );
  }

  if (fs.existsSync(existingFileLocation)) {
    // get key from existing NuGet.config so NuGet/dotnet can match credentials
    const curContents: string = fs.readFileSync(existingFileLocation, 'utf8');
    var json = xmlParser.parse(curContents, {ignoreAttributes: false});

    if (typeof json.configuration == 'undefined') {
      throw new Error(`The provided NuGet.config seems invalid.`);
    }
    if (typeof json.configuration.packageSources != 'undefined') {
      if (typeof json.configuration.packageSources.add != 'undefined') {
        // file has at least one <add>
        if (typeof json.configuration.packageSources.add[0] == 'undefined') {
          // file has only one <add>
          if (
            json.configuration.packageSources.add['@_value']
              .toLowerCase()
              .includes(feedUrl.toLowerCase())
          ) {
            let key = json.configuration.packageSources.add['@_key'];
            sourceKeys.push(key);
            core.debug(`Found a URL with key ${key}`);
          }
        } else {
          // file has 2+ <add>
          for (
            let i = 0;
            i < json.configuration.packageSources.add.length;
            i++
          ) {
            const source = json.configuration.packageSources.add[i];
            const value = source['@_value'];
            core.debug(`source '${value}'`);
            if (value.toLowerCase().includes(feedUrl.toLowerCase())) {
              let key = source['@_key'];
              sourceKeys.push(key);
              core.debug(`Found a URL with key ${key}`);
            }
          }
        }
      }
    }
  }

  xml = xmlbuilder
    .create('configuration')
    .ele('config')
    .ele('add', {key: 'defaultPushSource', value: sourceUrl})
    .up()
    .up();

  if (sourceKeys.length == 0) {
    let keystring = 'Source';
    xml = xml
      .ele('packageSources')
      .ele('add', {key: keystring, value: sourceUrl})
      .up()
      .up();
    sourceKeys.push(keystring);
  }
  xml = xml.ele('packageSourceCredentials');

  sourceKeys.forEach(key => {
    if (!isValidKey(key)) {
      throw new Error(
        "Source name can contain letters, numbers, and '-', '_', '.' symbols only. Please, fix source name in NuGet.config and try again."
      );
    }

    xml = xml
      .ele(key)
      .ele('add', {key: 'Username', value: owner})
      .up()
      .ele('add', {
        key: 'ClearTextPassword',
        value: process.env.NUGET_AUTH_TOKEN
      })
      .up()
      .up();
  });

  // If NuGet fixes itself such that on Linux it can look for environment variables in the config file (it doesn't seem to work today),
  // use this for the value above
  // process.platform == 'win32'
  //   ? '%NUGET_AUTH_TOKEN%'
  //   : '$NUGET_AUTH_TOKEN'

  var output = xml.end({pretty: true});
  fs.writeFileSync(tempFileLocation, output);
}
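A hedged usage sketch for the configAuthentication helper above (not code from this compare): the feed URL and optional config file typically come from action inputs, and NUGET_AUTH_TOKEN must be set on the step, otherwise the function throws as its error message describes. The setupNuGetAuth name and the source-url/config-file input names are assumptions for illustration.

// Hypothetical sketch: wiring configAuthentication from action inputs.
import * as core from '@actions/core';
import * as auth from './authutil';

export function setupNuGetAuth(): void {
  const sourceUrl: string = core.getInput('source-url');
  const configFile: string = core.getInput('config-file');
  if (sourceUrl) {
    core.debug(`Configuring NuGet auth for ${sourceUrl}`);
    // Throws unless the workflow step provides NUGET_AUTH_TOKEN, e.g.
    //   env:
    //     NUGET_AUTH_TOKEN: ${{secrets.GITHUB_TOKEN}}
    auth.configAuthentication(sourceUrl, configFile);
  }
}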
609
src/installer.ts
@ -1,304 +1,305 @@
// Load tempDirectory before it gets wiped by tool-cache
import * as core from '@actions/core';
import * as exec from '@actions/exec';
import * as io from '@actions/io';
import hc = require('@actions/http-client');
import {chmodSync} from 'fs';
import * as path from 'path';
import {ExecOptions} from '@actions/exec/lib/interfaces';
import * as semver from 'semver';

const IS_WINDOWS = process.platform === 'win32';

/**
 * Represents the inputted version information
 */
export class DotNetVersionInfo {
  public inputVersion: string;
  private fullversion: string;
  private isExactVersionSet: boolean = false;

  constructor(version: string) {
    this.inputVersion = version;

    // Check for exact match
    if (semver.valid(semver.clean(version) || '') != null) {
      this.fullversion = semver.clean(version) as string;
      this.isExactVersionSet = true;

      return;
    }

    const parts: string[] = version.split('.');

    if (parts.length < 2 || parts.length > 3) this.throwInvalidVersionFormat();

    if (parts.length == 3 && parts[2] !== 'x' && parts[2] !== '*') {
      this.throwInvalidVersionFormat();
    }

    const major = this.getVersionNumberOrThrow(parts[0]);
    const minor = ['x', '*'].includes(parts[1])
      ? parts[1]
      : this.getVersionNumberOrThrow(parts[1]);

    this.fullversion = major + '.' + minor;
  }

  private getVersionNumberOrThrow(input: string): number {
    try {
      if (!input || input.trim() === '') this.throwInvalidVersionFormat();

      let number = Number(input);

      if (Number.isNaN(number) || number < 0) this.throwInvalidVersionFormat();

      return number;
    } catch {
      this.throwInvalidVersionFormat();
      return -1;
    }
  }

  private throwInvalidVersionFormat() {
    throw new Error(
      'Invalid version format! Supported: 1.2.3, 1.2, 1.2.x, 1.2.*'
    );
  }

  /**
   * If true exacatly one version should be resolved
   */
  public isExactVersion(): boolean {
    return this.isExactVersionSet;
  }

  public version(): string {
    return this.fullversion;
  }
}

export class DotnetCoreInstaller {
  constructor(version: string, includePrerelease: boolean = false) {
    this.version = version;
    this.includePrerelease = includePrerelease;
  }

  public async installDotnet() {
    let output = '';
    let resultCode = 0;

    let calculatedVersion = await this.resolveVersion(
      new DotNetVersionInfo(this.version)
    );

    var envVariables: {[key: string]: string} = {};
    for (let key in process.env) {
      if (process.env[key]) {
        let value: any = process.env[key];
        envVariables[key] = value;
      }
    }
    if (IS_WINDOWS) {
      let escapedScript = path
        .join(__dirname, '..', 'externals', 'install-dotnet.ps1')
        .replace(/'/g, "''");
      let command = `& '${escapedScript}'`;
      if (calculatedVersion) {
        command += ` -Version ${calculatedVersion}`;
      }
      if (process.env['https_proxy'] != null) {
        command += ` -ProxyAddress ${process.env['https_proxy']}`;
      }
      // This is not currently an option
      if (process.env['no_proxy'] != null) {
        command += ` -ProxyBypassList ${process.env['no_proxy']}`;
      }

      // process.env must be explicitly passed in for DOTNET_INSTALL_DIR to be used
      const powershellPath =
        (await io.which('pwsh', false)) || (await io.which('powershell', true));

      var options: ExecOptions = {
        listeners: {
          stdout: (data: Buffer) => {
            output += data.toString();
          }
        },
        env: envVariables
      };

      resultCode = await exec.exec(
        `"${powershellPath}"`,
        [
          '-NoLogo',
          '-Sta',
          '-NoProfile',
          '-NonInteractive',
          '-ExecutionPolicy',
          'Unrestricted',
          '-Command',
          command
        ],
        options
      );
    } else {
      let escapedScript = path
        .join(__dirname, '..', 'externals', 'install-dotnet.sh')
        .replace(/'/g, "''");
      chmodSync(escapedScript, '777');

      const scriptPath = await io.which(escapedScript, true);

      let scriptArguments: string[] = [];
      if (calculatedVersion) {
        scriptArguments.push('--version', calculatedVersion);
      }

      // process.env must be explicitly passed in for DOTNET_INSTALL_DIR to be used
      resultCode = await exec.exec(`"${scriptPath}"`, scriptArguments, {
        listeners: {
          stdout: (data: Buffer) => {
            output += data.toString();
          }
        },
        env: envVariables
      });
    }

    if (resultCode != 0) {
      throw new Error(`Failed to install dotnet ${resultCode}. ${output}`);
    }
  }

  static addToPath() {
    if (process.env['DOTNET_INSTALL_DIR']) {
      core.addPath(process.env['DOTNET_INSTALL_DIR']);
      core.exportVariable('DOTNET_ROOT', process.env['DOTNET_INSTALL_DIR']);
    } else {
      if (IS_WINDOWS) {
        // This is the default set in install-dotnet.ps1
        core.addPath(
          path.join(process.env['LocalAppData'] + '', 'Microsoft', 'dotnet')
        );
        core.exportVariable(
          'DOTNET_ROOT',
          path.join(process.env['LocalAppData'] + '', 'Microsoft', 'dotnet')
        );
      } else {
        // This is the default set in install-dotnet.sh
        core.addPath(path.join(process.env['HOME'] + '', '.dotnet'));
        core.exportVariable(
          'DOTNET_ROOT',
          path.join(process.env['HOME'] + '', '.dotnet')
        );
      }
    }

    console.log(process.env['PATH']);
  }

  // versionInfo - versionInfo of the SDK/Runtime
  async resolveVersion(versionInfo: DotNetVersionInfo): Promise<string> {
    if (versionInfo.isExactVersion()) {
      return versionInfo.version();
    }

    const httpClient = new hc.HttpClient('actions/setup-dotnet', [], {
      allowRetries: true,
      maxRetries: 3
    });

    const releasesJsonUrl: string = await this.getReleasesJsonUrl(
      httpClient,
      versionInfo.version().split('.')
    );

    const releasesResponse = await httpClient.getJson<any>(releasesJsonUrl);
    const releasesResult = releasesResponse.result || {};
    let releasesInfo: any[] = releasesResult['releases'];
    releasesInfo = releasesInfo.filter((releaseInfo: any) => {
      return (
        semver.satisfies(releaseInfo['sdk']['version'], versionInfo.version(), {
          includePrerelease: this.includePrerelease
        }) ||
        semver.satisfies(
          releaseInfo['sdk']['version-display'],
          versionInfo.version(),
          {
            includePrerelease: this.includePrerelease
          }
        )
      );
    });

    // Exclude versions that are newer than the latest if using not exact
    let latestSdk: string = releasesResult['latest-sdk'];

    releasesInfo = releasesInfo.filter((releaseInfo: any) =>
      semver.lte(releaseInfo['sdk']['version'], latestSdk, {
        includePrerelease: this.includePrerelease
      })
    );

    // Sort for latest version
    releasesInfo = releasesInfo.sort((a, b) =>
      semver.rcompare(a['sdk']['version'], b['sdk']['version'], {
        includePrerelease: this.includePrerelease
      })
    );

    if (releasesInfo.length == 0) {
      throw new Error(
        `Could not find dotnet core version. Please ensure that specified version ${versionInfo.inputVersion} is valid.`
      );
    }

    let release = releasesInfo[0];
    return release['sdk']['version'];
  }

  private async getReleasesJsonUrl(
    httpClient: hc.HttpClient,
    versionParts: string[]
  ): Promise<string> {
    const response = await httpClient.getJson<any>(DotNetCoreIndexUrl);
    const result = response.result || {};
    let releasesInfo: any[] = result['releases-index'];

    releasesInfo = releasesInfo.filter((info: any) => {
      // channel-version is the first 2 elements of the version (e.g. 2.1), filter out versions that don't match 2.1.x.
      const sdkParts: string[] = info['channel-version'].split('.');
      if (
        versionParts.length >= 2 &&
        !(versionParts[1] == 'x' || versionParts[1] == '*')
      ) {
        return versionParts[0] == sdkParts[0] && versionParts[1] == sdkParts[1];
      }
      return versionParts[0] == sdkParts[0];
    });

    if (releasesInfo.length === 0) {
      throw new Error(
        `Could not find info for version ${versionParts.join(
          '.'
        )} at ${DotNetCoreIndexUrl}`
      );
    }

    const releaseInfo = releasesInfo[0];
    if (releaseInfo['support-phase'] === 'eol') {
      core.warning(
        `${releaseInfo['product']} ${releaseInfo['channel-version']} is no longer supported and will not receive security updates in the future. Please refer to https://aka.ms/dotnet-core-support for more information about the .NET support policy.`
      );
    }

    return releaseInfo['releases.json'];
  }

  private version: string;
  private includePrerelease: boolean;
}

const DotNetCoreIndexUrl: string =
- 'https://dotnetcli.blob.core.windows.net/dotnet/release-metadata/releases-index.json';
+ 'https://builds.dotnet.microsoft.com/dotnet/release-metadata/releases-index.json';
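A small, hypothetical sketch of the version handling implemented above (illustrative values, not code from this compare): exact versions short-circuit resolveVersion, while A.B and A.B.x inputs are treated as wildcards that resolveVersion matches against the release metadata served from DotNetCoreIndexUrl.

// Hypothetical sketch: how DotNetVersionInfo above classifies version inputs.
import {DotNetVersionInfo} from './installer';

const exact = new DotNetVersionInfo('3.1.201');
// exact.isExactVersion() === true, so resolveVersion() returns '3.1.201' as-is.

const wildcard = new DotNetVersionInfo('3.1.x');
// wildcard.isExactVersion() === false and wildcard.version() === '3.1', so
// resolveVersion() downloads releases-index.json (DotNetCoreIndexUrl above),
// follows the matching channel's releases.json, and picks the newest SDK that
// satisfies '3.1' and is not newer than that channel's 'latest-sdk'.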
@ -38,9 +38,8 @@ export async function run() {
}

if (versions.length) {
- const includePrerelease: boolean = core.getBooleanInput(
-   'include-prerelease'
- );
+ const includePrerelease: boolean =
+   core.getBooleanInput('include-prerelease');
let dotnetInstaller!: installer.DotnetCoreInstaller;
for (const version of new Set<string>(versions)) {
dotnetInstaller = new installer.DotnetCoreInstaller(
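For context on the hunk above, a hypothetical sketch of how the include-prerelease boolean input feeds the installer loop; the installAll name is illustrative and the body only mirrors the surrounding code, it is not taken from this compare.

// Hypothetical sketch: read the boolean input once and reuse it for every
// requested version, as the hunk above does inside run().
import * as core from '@actions/core';
import * as installer from './installer';

async function installAll(versions: string[]): Promise<void> {
  const includePrerelease: boolean =
    core.getBooleanInput('include-prerelease');
  for (const version of new Set<string>(versions)) {
    const dotnetInstaller = new installer.DotnetCoreInstaller(
      version,
      includePrerelease
    );
    await dotnetInstaller.installDotnet();
  }
  installer.DotnetCoreInstaller.addToPath();
}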